Dataset schema (column, dtype, and value range; nullable columns may be empty):

| column | dtype | length / range |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 3 to 1.03M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3 to 972 |
| max_stars_repo_name | string | length 6 to 130 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | list | length 1 to 10 |
| max_stars_count | int64 | 1 to 191k, nullable |
| max_stars_repo_stars_event_min_datetime | string | length 24, nullable |
| max_stars_repo_stars_event_max_datetime | string | length 24, nullable |
| max_issues_repo_path | string | length 3 to 972 |
| max_issues_repo_name | string | length 6 to 130 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | list | length 1 to 10 |
| max_issues_count | int64 | 1 to 116k, nullable |
| max_issues_repo_issues_event_min_datetime | string | length 24, nullable |
| max_issues_repo_issues_event_max_datetime | string | length 24, nullable |
| max_forks_repo_path | string | length 3 to 972 |
| max_forks_repo_name | string | length 6 to 130 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | list | length 1 to 10 |
| max_forks_count | int64 | 1 to 105k, nullable |
| max_forks_repo_forks_event_min_datetime | string | length 24, nullable |
| max_forks_repo_forks_event_max_datetime | string | length 24, nullable |
| content | string | length 3 to 1.03M |
| avg_line_length | float64 | 1.13 to 941k |
| max_line_length | int64 | 2 to 941k |
| alphanum_fraction | float64 | 0 to 1 |

Each row below lists its metadata followed by the file content. In every row shown here the repo path, repo name, head hexsha and licenses are identical across the max_stars, max_issues and max_forks column groups, so they are listed once per row.
hexsha: 88ba5c31c311e6ee1b48bcee4d08a7206501b01d | size: 2,745 | ext: py | lang: Python
repo path: opendis/PduFactory.py | repo name: DMOC-C/DIS-PDU | repo head hexsha: af5c93b2081298e0c453592f62c8cc9484e3ded0 | licenses: ["BSD-2-Clause"]
max_stars_count: null | max_issues_count: null | max_forks_count: null | star/issue/fork event datetimes: null

content:
__author__ = "mcgredo"
__date__ = "$Jun 25, 2015 11:31:42 AM$"
from .DataInputStream import DataInputStream
from .dis7 import *
from io import BytesIO
import binascii
import io
PduTypeDecoders = {
1 : EntityStatePdu
, 2 : FirePdu
, 3 : DetonationPdu
, 4 : CollisionPdu
, 5 : ServiceRequestPdu
, 6 : CollisionElasticPdu
, 7 : ResupplyReceivedPdu
, 9 : RepairCompletePdu
, 10 : RepairResponsePdu
, 11 : CreateEntityPdu
, 12 : RemoveEntityPdu
, 13 : StartResumePdu
, 14 : StopFreezePdu
, 15 : AcknowledgePdu
, 16 : ActionRequestPdu
, 17 : ActionResponsePdu
, 18 : DataQueryPdu
, 19 : SetDataPdu
, 20 : DataPdu
, 21 : EventReportPdu
, 22 : CommentPdu
, 23 : ElectronicEmissionsPdu
, 24 : DesignatorPdu
, 25 : TransmitterPdu
, 26 : SignalPdu
, 27 : ReceiverPdu
, 29 : UaPdu
, 31 : IntercomSignalPdu
, 32 : IntercomControlPdu
, 36 : IsPartOfPdu
, 37 : MinefieldStatePdu
, 40 : MinefieldResponseNackPdu
, 41 : PointObjectStatePdu
, 43 : PointObjectStatePdu
, 44 : LinearObjectStatePdu
, 45 : ArealObjectStatePdu
, 51 : CreateEntityReliablePdu
, 52 : RemoveEntityReliablePdu
, 54 : StopFreezeReliablePdu
, 55 : AcknowledgeReliablePdu
, 56 : ActionRequestReliablePdu
, 57 : ActionResponseReliablePdu
, 58 : DataQueryReliablePdu
, 59 : SetDataReliablePdu
, 60 : DataReliablePdu
, 61 : EventReportReliablePdu
, 62 : CommentReliablePdu
, 63 : RecordQueryReliablePdu
, 66 : CollisionElasticPdu
, 67 : EntityStateUpdatePdu
, 69 : EntityDamageStatusPdu
}
def getPdu(inputStream):
    # The PDU type enumeration is the 3rd byte of the DIS header (after
    # protocol version and exercise ID): skip two bytes, read the type,
    # then rewind so the decoder can parse the full header.
inputStream.read_unsigned_byte()
inputStream.read_unsigned_byte()
pduType = inputStream.read_byte()
inputStream.stream.seek(-3, 1) # rewind
if pduType in PduTypeDecoders.keys():
Decoder = PduTypeDecoders[pduType]
pdu = Decoder()
pdu.parse(inputStream)
return pdu
# Punt and return none if we don't have a match on anything
# print("Unable to find a PDU corresponding to PduType {}".format(pduType))
return None
def createPdu(data):
""" Create a PDU of the correct type when passed an array of binary data
input: a bytebuffer of DIS data
output: a python DIS pdu instance of the correct class"""
memoryStream = BytesIO(data)
inputStream = DataInputStream(memoryStream)
return getPdu(inputStream)
def createPduFromFilePath(filePath):
""" Utility written for unit tests, but could have other uses too."""
f = io.open(filePath, "rb")
inputStream = DataInputStream(f)
return getPdu(inputStream)
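# --- Usage sketch (added; not part of the original opendis module) ---
# Decoding a captured PDU from a file given on the command line; the path is
# supplied by the caller, nothing here is hard-coded.
if __name__ == "__main__":
    import sys
    if len(sys.argv) > 1:
        # createPduFromFilePath() reads the binary file and dispatches on the
        # PDU type byte in the DIS header (see PduTypeDecoders above).
        example_pdu = createPduFromFilePath(sys.argv[1])
        name = type(example_pdu).__name__ if example_pdu else "unknown PDU type"
        print("Decoded:", name)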
avg_line_length: 27.45 | max_line_length: 79 | alphanum_fraction: 0.669945

hexsha: ee2e4f3bb64ab994c78bb6465ce8c4ca5d85cfc0 | size: 6,553 | ext: py | lang: Python
repo path: player.py | repo name: PTNobel/musicctl | repo head hexsha: c311d025176564dc1ed09d5143519bbdf1b0f2a5 | licenses: ["MIT"]
max_stars_count: 1 (stars events 2017-09-01T06:28:36.000Z to 2017-09-01T06:28:36.000Z) | max_issues_count: null | max_forks_count: null

content:
#!/usr/bin/python3
# A python3 port of musicctl.sh.
import time
import os
import sys
import re
import subprocess
import process
# warning() functions like print, except it prefixes everything and prints to
# stderr.
def warning(*objs, prefix='WARNING: '):
printed_list = str(prefix)
for i in objs:
printed_list += str(i)
print(printed_list, file=sys.stderr)
def get_keys(list_of_classes):
for i in list_of_classes:
print("For player " + str(i) +
" the following commands are available:")
for j in sorted(i.commands.keys()):
print(" " + j)
exit(0)
class mpd:
__name__ = 'mpd'
def __init__(self):
self.commands = {'play': self.pause, 'pause': self.pause,
'back': self.back, 'next': self.next,
'quit': self.stop, 'stop': self.stop,
'is_playing': self.is_playing_shell_wrapper}
def _call_mpc(self, *option):
        # Discard mpc's stdout; subprocess.DEVNULL avoids opening /dev/null by hand.
        subprocess.call(['mpc', *option], stdout=subprocess.DEVNULL)
def __repr__(self):
return self.__name__
def pause(self):
self._call_mpc('toggle')
def back(self):
self._call_mpc('prev')
def next(self):
self._call_mpc('next')
def stop(self):
self._call_mpc('stop')
def is_playing_shell_wrapper(self):
if self.is_playing():
exit(0)
else:
exit(1)
def is_playing(self):
try:
is_playing_present = b"playing" in subprocess.check_output(
['mpc', 'status'])
except subprocess.CalledProcessError:
is_playing_present = False
return is_playing_present
# Since the easiest way to control mopidy is through its mpd implementation,
# the mopidy class inherits its implementation from the mpd class.
class mopidy(mpd):
__name__ = 'mopidy'
class pianobar:
__name__ = 'pianobar'
def __init__(self):
self.commands = {'play': self.pause, 'pause': self.pause,
'back': self.like, 'next': self.next,
'quit': self.stop, 'stop': self.stop,
'tired': self.tired, 'like': self.like,
'dislike': self.dislike,
'is_playing': self.is_playing_shell_wrapper}
def __repr__(self):
return self.__name__
def _call_pianoctl(self, option):
subprocess.call(
['pianoctl', option])
def pause(self):
self._call_pianoctl('p')
def like(self):
self._call_pianoctl('+')
def dislike(self):
self._call_pianoctl('-')
def next(self):
self._call_pianoctl('n')
def stop(self):
self._call_pianoctl('q')
# if pianobar isn't responding kill it.
time.sleep(1)
process.update_buffers()
if process.is_comm_running("pianobar"):
subprocess.call(['kill'] + process.get_pids_of_comm('pianobar'))
def tired(self):
self._call_pianoctl('t')
def is_playing_shell_wrapper(self):
if self.is_playing():
exit(0)
else:
exit(1)
def is_playing(self):
log1_time_stamp, success1 = self._get_time()
time.sleep(2)
log2_time_stamp, success2 = self._get_time()
        # If either read failed the format check, or the timestamp did not
        # advance between reads, report "not playing".
        if not (success1 and success2):
            output = False
        elif log1_time_stamp == log2_time_stamp:
            output = False
        else:
            output = True
return output
def _get_time(self, tries=0):
"""Reads the pianobar time, and returns a tuple of str '##:##/##:##'
and a boolean which reflects whether it matches the regex"""
log = open(os.path.expanduser('~/.config/pianobar/out'), 'r')
time_stamp = log.read()[-12:-1]
log.close()
if re.match(r'^\d{2}:\d{2}/\d{2}:\d{2}$', time_stamp):
return (time_stamp, True)
elif tries < 3:
time.sleep(1)
return self._get_time(tries+1)
else:
return (time_stamp, False)
class playerctl:
__name__ = 'playerctl'
def __init__(self):
self.commands = {'play': self.pause, 'pause': self.pause,
'back': self.back, 'next': self.next,
'quit': self.stop, 'stop': self.stop,
'is_playing': self.is_playing_shell_wrapper}
def __repr__(self):
return self.__name__
def _call_playerctl(self, option):
subprocess.call(
['playerctl', option])
def pause(self):
self._call_playerctl('play-pause')
def back(self):
self._call_playerctl('previous')
def next(self):
self._call_playerctl('next')
def stop(self):
self._call_playerctl('stop')
def is_playing_shell_wrapper(self):
if self.is_playing():
exit(0)
else:
exit(1)
def is_playing(self):
try:
is_playing_present = b"Playing" in subprocess.check_output(
['playerctl', 'status'])
except subprocess.CalledProcessError:
is_playing_present = False
return is_playing_present
def current_player():
list_of_process_names = process.get_comms()
    # pianobar gets priority over mpd, unless mpd is playing.
if 'mpd' in list_of_process_names:
if 'pianobar' in list_of_process_names:
if b'playing' in subprocess.check_output(['mpc', 'status']):
output = mpd()
else:
output = pianobar()
else:
output = mpd()
elif 'pianobar' in list_of_process_names:
output = pianobar()
elif 'mopidy' in list_of_process_names:
output = mopidy()
else:
output = playerctl()
return output
def is_playing():
return current_player().is_playing()
def pause():
current_player().commands['pause']()
def stop():
current_player().commands['stop']()
def back():
current_player().commands['back']()
def next_song():
current_player().commands['next']()
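# --- Illustrative addition (not in the original module) ---
# The wrappers above cover the common verbs; a generic dispatcher shows how
# any key in a player's `commands` dict could be routed. The name `dispatch`
# is hypothetical.
def dispatch(command):
    """Look up `command` on the currently active player and run it."""
    backend = current_player()
    if command in backend.commands:
        backend.commands[command]()
    else:
        warning("unsupported command '", command, "' for player ", backend,
                prefix='ERROR: ')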
def print_keys(list_of_classes=[mopidy, mpd, pianobar, playerctl]):
for i in list_of_classes:
player = i()
print("For player " + player.__repr__() +
" the following commands are available:")
for j in sorted(player.commands.keys()):
print(" " + j)
if __name__ == '__main__':
print('Please don\'t do this.')
avg_line_length: 25.901186 | max_line_length: 77 | alphanum_fraction: 0.573783

hexsha: 4a09b3ea6cd563a8a426c8fdec82bda8183d1960 | size: 7,546 | ext: py | lang: Python
repo path: cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_lib_keychain_act.py | repo name: CiscoDevNet/ydk-py | repo head hexsha: 073731fea50694d0bc6cd8ebf10fec308dcc0aa9 | licenses: ["ECL-2.0", "Apache-2.0"]
max_stars_count: 177 (stars events 2016-03-15T17:03:51.000Z to 2022-03-18T16:48:44.000Z) | max_issues_count: 18 (issues events 2016-03-30T10:45:22.000Z to 2020-07-14T16:28:13.000Z) | max_forks_count: 85 (forks events 2016-03-16T20:38:57.000Z to 2022-02-22T04:26:02.000Z)

content:
""" Cisco_IOS_XR_lib_keychain_act
This module contains a collection of YANG definitions
for Cisco IOS\-XR action package configuration.
Copyright (c) 2017 by Cisco Systems, Inc.
All rights reserved.
"""
import sys
from collections import OrderedDict
from ydk.types import Entity as _Entity_
from ydk.types import EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class MasterKeyAdd(_Entity_):
"""
To add a new master key
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lib_keychain_act.MasterKeyAdd.Input>`
"""
_prefix = 'lib-keychain-act'
_revision = '2017-04-17'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(MasterKeyAdd, self).__init__()
self._top_entity = None
self.yang_name = "master-key-add"
self.yang_parent_name = "Cisco-IOS-XR-lib-keychain-act"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict()
self.input = MasterKeyAdd.Input()
self.input.parent = self
self._children_name_map["input"] = "input"
self._segment_path = lambda: "Cisco-IOS-XR-lib-keychain-act:master-key-add"
self._is_frozen = True
class Input(_Entity_):
"""
.. attribute:: new_key
New master key to be added
**type**\: str
"""
_prefix = 'lib-keychain-act'
_revision = '2017-04-17'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(MasterKeyAdd.Input, self).__init__()
self.yang_name = "input"
self.yang_parent_name = "master-key-add"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('new_key', (YLeaf(YType.str, 'new-key'), ['str'])),
])
self.new_key = None
self._segment_path = lambda: "input"
self._absolute_path = lambda: "Cisco-IOS-XR-lib-keychain-act:master-key-add/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(MasterKeyAdd.Input, ['new_key'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_keychain_act as meta
return meta._meta_table['MasterKeyAdd.Input']['meta_info']
def clone_ptr(self):
self._top_entity = MasterKeyAdd()
return self._top_entity
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_keychain_act as meta
return meta._meta_table['MasterKeyAdd']['meta_info']
class MasterKeyDelete(_Entity_):
"""
Remove Master key
"""
_prefix = 'lib-keychain-act'
_revision = '2017-04-17'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(MasterKeyDelete, self).__init__()
self._top_entity = None
self.yang_name = "master-key-delete"
self.yang_parent_name = "Cisco-IOS-XR-lib-keychain-act"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict()
self._segment_path = lambda: "Cisco-IOS-XR-lib-keychain-act:master-key-delete"
self._is_frozen = True
def clone_ptr(self):
self._top_entity = MasterKeyDelete()
return self._top_entity
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_keychain_act as meta
return meta._meta_table['MasterKeyDelete']['meta_info']
class MasterKeyUpdate(_Entity_):
"""
To update master key
.. attribute:: input
**type**\: :py:class:`Input <ydk.models.cisco_ios_xr.Cisco_IOS_XR_lib_keychain_act.MasterKeyUpdate.Input>`
"""
_prefix = 'lib-keychain-act'
_revision = '2017-04-17'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(MasterKeyUpdate, self).__init__()
self._top_entity = None
self.yang_name = "master-key-update"
self.yang_parent_name = "Cisco-IOS-XR-lib-keychain-act"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict()
self.input = MasterKeyUpdate.Input()
self.input.parent = self
self._children_name_map["input"] = "input"
self._segment_path = lambda: "Cisco-IOS-XR-lib-keychain-act:master-key-update"
self._is_frozen = True
class Input(_Entity_):
"""
.. attribute:: old_key
key already added/key to be replaced
**type**\: str
**mandatory**\: True
.. attribute:: new_key
New master key to be added
**type**\: str
**mandatory**\: True
"""
_prefix = 'lib-keychain-act'
_revision = '2017-04-17'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(MasterKeyUpdate.Input, self).__init__()
self.yang_name = "input"
self.yang_parent_name = "master-key-update"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('old_key', (YLeaf(YType.str, 'old-key'), ['str'])),
('new_key', (YLeaf(YType.str, 'new-key'), ['str'])),
])
self.old_key = None
self.new_key = None
self._segment_path = lambda: "input"
self._absolute_path = lambda: "Cisco-IOS-XR-lib-keychain-act:master-key-update/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(MasterKeyUpdate.Input, ['old_key', 'new_key'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_keychain_act as meta
return meta._meta_table['MasterKeyUpdate.Input']['meta_info']
def clone_ptr(self):
self._top_entity = MasterKeyUpdate()
return self._top_entity
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lib_keychain_act as meta
return meta._meta_table['MasterKeyUpdate']['meta_info']
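# --- Usage sketch (added; not part of the generated bindings) ---
# Populating the master-key-update action defined above. Sending it to a
# device normally goes through a YDK service provider; the provider/executor
# calls are an assumption about the surrounding ydk-py API and are left
# commented out so this sketch stays side-effect free.
if __name__ == "__main__":
    rpc = MasterKeyUpdate()
    rpc.input.old_key = "old-secret"   # placeholder values, not real keys
    rpc.input.new_key = "new-secret"
    # from ydk.providers import NetconfServiceProvider
    # from ydk.services import ExecutorService
    # provider = NetconfServiceProvider(address="10.0.0.1", username="admin", password="admin")
    # ExecutorService().execute_rpc(provider, rpc)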
avg_line_length: 29.476563 | max_line_length: 126 | alphanum_fraction: 0.601246

hexsha: 361449cb9e4ab14c2dcab058a8171bafbbff0ef7 | size: 2,813 | ext: py | lang: Python
repo path: data/external/repositories_2to3/132160/kaggle-ndsb-master/configurations/featharalick_cr4_ds_4stage_big.py | repo name: Keesiu/meta-kaggle | repo head hexsha: 87de739aba2399fd31072ee81b391f9b7a63f540 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: 1 (forks events 2019-12-04T08:23:33.000Z to 2019-12-04T08:23:33.000Z)

content:
import numpy as np
import theano
import theano.tensor as T
import lasagne as nn
import data
import load
import nn_plankton
import dihedral
import tmp_dnn
import tta
features = [
# "hu",
# "tutorial",
"haralick",
# "aaronmoments",
# "lbp",
# "pftas",
# "zernike_moments",
# "image_size",
]
batch_size = 128
chunk_size = 32768
num_chunks_train = 240
momentum = 0.9
learning_rate_schedule = {
0: 0.001,
100: 0.0001,
200: 0.00001,
}
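# --- Illustrative helper (added; not part of the original configuration) ---
# The schedule above is assumed to be keyed by training-chunk index; a trainer
# could resolve the learning rate in effect at a given chunk like this:
def get_learning_rate(chunk_idx, schedule=learning_rate_schedule):
    """Return the most recent scheduled learning rate at or before chunk_idx."""
    starts = [start for start in schedule if start <= chunk_idx]
    return schedule[max(starts)] if starts else None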
validate_every = 40
save_every = 40
sdir = "/mnt/storage/users/sedielem/git/kaggle-plankton/predictions/"
train_pred_file = sdir+"train--cr4_ds_4stage_big--cr4_ds_4stage_big-paard-20150227-230849--avg-probs.npy"
valid_pred_file = sdir+"valid--cr4_ds_4stage_big--cr4_ds_4stage_big-paard-20150227-230849--avg-probs.npy"
test_pred_file = sdir+"test--cr4_ds_4stage_big--cr4_ds_4stage_big-paard-20150227-230849--avg-probs.npy"
data_loader = load.PredictionsWithFeaturesDataLoader(
features = features,
train_pred_file=train_pred_file,
valid_pred_file=valid_pred_file,
test_pred_file=test_pred_file,
num_chunks_train=num_chunks_train,
chunk_size=chunk_size)
create_train_gen = lambda: data_loader.create_random_gen()
create_eval_train_gen = lambda: data_loader.create_fixed_gen("train")
create_eval_valid_gen = lambda: data_loader.create_fixed_gen("valid")
create_eval_test_gen = lambda: data_loader.create_fixed_gen("test")
def build_model():
l0 = nn.layers.InputLayer((batch_size, data.num_classes))
l0_size = nn.layers.InputLayer((batch_size, 52))
l1_size = nn.layers.DenseLayer(l0_size, num_units=80, W=nn_plankton.Orthogonal('relu'), b=nn.init.Constant(0.1))
l2_size = nn.layers.DenseLayer(l1_size, num_units=80, W=nn_plankton.Orthogonal('relu'), b=nn.init.Constant(0.1))
l3_size = nn.layers.DenseLayer(l2_size, num_units=data.num_classes, W=nn_plankton.Orthogonal(), b=nn.init.Constant(0.1), nonlinearity=None)
l1 = nn_plankton.NonlinLayer(l0, T.log)
ltot = nn.layers.ElemwiseSumLayer([l1, l3_size])
# norm_by_sum = lambda x: x / x.sum(1).dimshuffle(0, "x")
lout = nn_plankton.NonlinLayer(ltot, nonlinearity=T.nnet.softmax)
return [l0, l0_size], lout
def build_objective(l_ins, l_out):
reg_param = 0.0002
    alpha = 0.  # 0 -> pure L2, 1 -> pure L1
print("regu", reg_param, alpha)
# lambda_reg = 0.005
params = nn.layers.get_all_non_bias_params(l_out)
# reg_term = sum(T.sum(p**2) for p in params)
L2 = sum(T.sum(p**2) for p in params)
L1 = sum(T.sum(T.abs_(p)) for p in params)
def loss(y, t):
return nn_plankton.log_loss(y, t) + reg_param*(alpha * L1 + (1-alpha) * L2)
return nn.objectives.Objective(l_out, loss_function=loss)
avg_line_length: 31.255556 | max_line_length: 144 | alphanum_fraction: 0.696765

hexsha: 127882f86e77326c95d91949f4aec66e2c1fd88e | size: 2,068 | ext: py | lang: Python
repo path: lens/flags.py | repo name: POFK/LensFinder | repo head hexsha: 56a8cb3f99a68b5922e0a8c76e1cee27ef75a09e | licenses: ["MIT"]
max_stars_count: null | max_issues_count: 4 (issues events 2019-06-25T08:37:45.000Z to 2019-07-17T03:09:26.000Z) | max_forks_count: null

content:
#!/usr/bin/env python
# coding=utf-8
import argparse
import os
import torch
parser = argparse.ArgumentParser(description='PyTorch Example')
parser.add_argument('--num_class', type=int, default=2, metavar='N')
parser.add_argument('--crop_range', type=int, default=84, metavar='N')
parser.add_argument('--batch_size', type=int, default=128, metavar='N',
help='input batch size for training (default: 128)')
parser.add_argument('--test_batch_size', type=int, default=128 * 4, metavar='N',
help='input batch size for testing (default: 512)')
parser.add_argument('--epochs', type=int, default=10, metavar='N',
help='number of epochs to train (default: 10)')
parser.add_argument('--epoch', type=int, default=0, metavar='N',
                    help='first epoch of training, or the epoch used for evaluation (default: 0)')
parser.add_argument('--lr', type=float, default=1e-5, metavar='LR',
                    help='learning rate (default: 1e-5)')
parser.add_argument('--weight_decay', type=float, default=0.0, metavar='WD',
help='weight decay (default: 0)')
parser.add_argument('--no_cuda', action='store_true', default=False,
help='disables CUDA training')
parser.add_argument('--seed', type=int, default=1, metavar='S',
help='random seed (default: 1)')
parser.add_argument('--name', type=str, default='test', help='')
parser.add_argument('--mode', type=str, default='train', help='')
parser.add_argument('--model_dir', type=str, default='model',
help='For Saving the current Model')
parser.add_argument('--log_dir', type=str, default='log',
help='For Saving the current log information')
parser.add_argument('--base_dir',
type=str,
default='/data/storage1/LensFinder',
help='base Dir')
args = parser.parse_args()
use_cuda = not args.no_cuda and torch.cuda.is_available()
#args.base_dir = os.path.join(args.base_dir, args.name)
args.use_cuda = use_cuda
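# --- Usage sketch (added; not part of the original module) ---
# Other modules are expected to do `from lens.flags import args` and read
# fields such as args.lr, args.batch_size and args.use_cuda. Defaults can be
# overridden per run, e.g. (module path assumed from the repo layout):
#
#   python -m lens.flags --lr 1e-4 --batch_size 64 --mode eval --no_cuda
if __name__ == "__main__":
    # Quick sanity check of the parsed configuration.
    print(args)
    print("CUDA available and enabled:", args.use_cuda)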
avg_line_length: 51.7 | max_line_length: 95 | alphanum_fraction: 0.64265

hexsha: 7469a35bb02445e469a3fff6c46a32d2ac364f1b | size: 23,155 | ext: py | lang: Python
repo path: backend/AXIOME3_app/datahandle/views.py | repo name: neufeld/AXIOME3-GUI | repo head hexsha: 80b87753b47fab116324b4f0e4151c21ab3b1725 | licenses: ["BSD-3-Clause"]
max_stars_count: 2 (stars events 2021-02-25T16:59:12.000Z to 2021-02-25T20:06:15.000Z) | max_issues_count: 7 (issues events 2020-11-18T08:05:52.000Z to 2022-02-17T20:45:10.000Z) | max_forks_count: null

content:
from flask import Blueprint, request, Response, current_app
import uuid
import os
#from werkzeug import secure_filename
# For console debugging
import sys
# Custom modules
from AXIOME3_app.datahandle import config_generator
from AXIOME3_app.datahandle import (
luigi_prep_helper,
input_upload_helper,
denoise_helper,
taxonomic_classification_helper,
analysis_helper,
extension_helper
)
# Celery task
from AXIOME3_app.tasks.pipeline_config_generator import config_task
from AXIOME3_app.tasks.input_upload import import_data_task
from AXIOME3_app.tasks.denoise import denoise_task
from AXIOME3_app.tasks.taxonomic_classification import taxonomic_classification_task
from AXIOME3_app.tasks.analysis import analysis_task
from AXIOME3_app.tasks.pcoa import pcoa_task
from AXIOME3_app.tasks.bubbleplot import bubbleplot_task
from AXIOME3_app.tasks.triplot import triplot_task
from AXIOME3_app.tasks.pipeline import check_output_task
# Custom Exceptions
from AXIOME3_app.exceptions.exception import AXIOME3Error
from AXIOME3_app.email.gmail import SendMessage
def send_queue_email(_id, sender, recipient, taskName):
if(recipient is not None):
subject = "AXIOME3 task queued"
msgHtml = """
<div>
<h2>Session ID</h2>
<p>{_id}</p>
<h2>Message</h2>
<p>{taskName} task queued</p>
</div>
""".format(_id=_id, taskName=taskName)
SendMessage(
sender=sender,
recipient=recipient,
subject=subject,
msgHtml=msgHtml
)
blueprint = Blueprint("datahandle", __name__, url_prefix="/datahandle")
@blueprint.route("/inputupload", methods=['POST'])
def inputupload():
    # Email recipient
if("email" in request.form):
recipient = request.form["email"]
else:
recipient = None
# Use UUID4 for unique identifier
_id = str(request.form['uuid'])
URL = current_app.config["CELERY_BROKER_URL"]
sender = current_app.config["GMAIL_SENDER"]
# path to file to record task progress
# It will be used to retrieve working progress
# Maybe replace it with database later
task_progress_file = os.path.join('/output', _id, 'task_progress.txt')
try:
# Check if the upload is made from the client or server
if("manifest" in request.files):
manifest_file = request.files["manifest"]
elif("manifest" in request.form):
manifest_file = request.form["manifest"]
else:
raise FileNotFoundError("Manifest file must be uploaded!")
input_format = request.form["Input Format"]
sample_type = request.form["Sample Type"]
is_multiple = request.form["multiple run"]
# Do preliminary checks on manifest file
manifest_path = input_upload_helper.input_upload_precheck(
_id=_id,
uploaded_manifest=manifest_file,
input_format=input_format,
is_multiple=is_multiple
)
# Prepare necessary files for input upload
log_config_path = luigi_prep_helper.pipeline_setup(_id)
task_kwargs = {
'_id': _id,
'logging_config': log_config_path,
'manifest_path': manifest_path,
'sample_type': sample_type,
'input_format': input_format,
'is_multiple': is_multiple,
'URL': URL,
'task_progress_file': task_progress_file,
'sender': sender,
'recipient': recipient
}
send_queue_email(_id, sender, recipient, "Input Upload")
import_data_task.apply_async(kwargs=task_kwargs)
except AXIOME3Error as err:
current_app.logger.error(str(err))
return err.response
except FileNotFoundError as err:
current_app.logger.error(str(err))
return Response(str(err), status=400, mimetype='text/html')
except Exception as err:
current_app.logger.error(str(err))
return Response("Internal Server Error", status=500, mimetype='text/html')
return Response("Success!", status=200, mimetype='text/html')
@blueprint.route("/denoise", methods=['POST'])
def denoise():
    # Email recipient
if("email" in request.form):
recipient = request.form["email"]
else:
recipient = None
sender = current_app.config["GMAIL_SENDER"]
# Use UUID4 for unique identifier
_id = str(request.form['uuid'])
URL = current_app.config["CELERY_BROKER_URL"]
# path to file to record task progress
# It will be used to retrieve working progress
# Maybe replace it with database later
task_progress_file = os.path.join('/output', _id, 'task_progress.txt')
try:
# Check if the upload is made from the client or server
if("manifest" in request.files):
manifest_file = request.files["manifest"]
elif("manifest" in request.form):
manifest_file = request.form["manifest"]
else:
raise FileNotFoundError("Manifest file must be uploaded!")
input_format = request.form["Input Format"]
sample_type = request.form["Sample Type"]
is_multiple = request.form["multiple run"]
trunc_len_f = request.form["trunc-len-f"]
trunc_len_r = request.form["trunc-len-r"]
trim_left_f = request.form["trim-left-f"]
trim_left_r = request.form["trim-left-r"]
n_cores = request.form["cores"]
#denoise_input_path = denoise_helper.denoise_precheck(
# _id=_id,
# sequence_data=imported_qza
#)
manifest_path = input_upload_helper.input_upload_precheck(
_id=_id,
uploaded_manifest=manifest_file,
input_format=input_format,
is_multiple=is_multiple
)
# Prepare necessary files for denoise
log_config_path = luigi_prep_helper.pipeline_setup(_id)
# Copy input file to premade output dir
#denoise_helper.denoise_setup(denoise_input_path, _id)
task_kwargs = {
'_id': _id,
'logging_config': log_config_path,
'manifest_path': manifest_path,
'sample_type': sample_type,
'input_format': input_format,
'trim_left_f': trim_left_f,
'trunc_len_f': trunc_len_f,
'trim_left_r': trim_left_r,
'trunc_len_r': trunc_len_r,
'is_multiple': is_multiple,
'n_cores': n_cores,
'URL': URL,
'task_progress_file': task_progress_file,
'sender': sender,
'recipient': recipient
}
send_queue_email(_id, sender, recipient, "Denoise")
denoise_task.apply_async(kwargs=task_kwargs)
except AXIOME3Error as err:
current_app.logger.error(str(err))
return err.response
except FileNotFoundError as err:
current_app.logger.error(str(err))
return Response(str(err), status=400, mimetype='text/html')
except Exception as err:
current_app.logger.error(str(err))
return Response("Internal Server Error", status=500, mimetype='text/html')
return Response("Success!", status=200, mimetype='text/html')
@blueprint.route("/taxonomic_classification", methods=['POST'])
def taxonomic_classification():
    # Email recipient
if("email" in request.form):
recipient = request.form["email"]
else:
recipient = None
sender = current_app.config["GMAIL_SENDER"]
# Use UUID4 for unique identifier
_id = str(request.form['uuid'])
URL = current_app.config["CELERY_BROKER_URL"]
# path to file to record task progress
# It will be used to retrieve working progress
# Maybe replace it with database later
task_progress_file = os.path.join('/output', _id, 'task_progress.txt')
try:
# Check if the upload is made from the client or server
if("feature_table" in request.files):
feature_table = request.files["feature_table"]
elif("feature_table" in request.form):
feature_table = request.form["feature_table"]
else:
raise FileNotFoundError("Feature table must be uploaded!")
if("rep_seqs" in request.files):
rep_seqs = request.files["rep_seqs"]
elif("rep_seqs" in request.form):
rep_seqs = request.form["rep_seqs"]
else:
raise FileNotFoundError("Representative sequences must be uploaded!")
if("classifier" in request.files):
classifier = request.files["classifier"]
elif("classifier" in request.form):
classifier = request.form["classifier"]
else:
            # use default classifier if not specified by users
# read the value from env file?
#classifier = "/pipeline/AXIOME3/2020_06_classifier_silva138_NR99_V4V5.qza"
classifier = None
n_cores = request.form["cores"]
feature_table_path, rep_seqs_path, classifier_path = taxonomic_classification_helper.taxonomic_classification_precheck(
_id=_id,
feature_table=feature_table,
rep_seqs=rep_seqs,
classifier=classifier
)
        # Prepare necessary files for analysis
log_config_path = luigi_prep_helper.pipeline_setup(_id)
# Copy input file to premade output dir
taxonomic_classification_helper.taxonomic_classification_setup(_id, feature_table_path, rep_seqs_path)
task_kwargs = {
'_id': _id,
'logging_config': log_config_path,
'classifier_path': classifier_path,
'n_cores': n_cores,
'URL': URL,
'task_progress_file': task_progress_file,
'sender': sender,
'recipient': recipient
}
send_queue_email(_id, sender, recipient, "Taxonomic Classification")
taxonomic_classification_task.apply_async(kwargs=task_kwargs)
except AXIOME3Error as err:
current_app.logger.error(str(err))
return err.response
except FileNotFoundError as err:
current_app.logger.error(str(err))
return Response(str(err), status=400, mimetype='text/html')
except Exception as err:
current_app.logger.error(str(err))
return Response("Internal Server Error", status=500, mimetype='text/html')
return Response("Success!", status=200, mimetype='text/html')
@blueprint.route("/analysis", methods=['POST'])
def analysis():
    # Email recipient
if("email" in request.form):
recipient = request.form["email"]
else:
recipient = None
sender = current_app.config["GMAIL_SENDER"]
# Use UUID4 for unique identifier
_id = str(request.form['uuid'])
URL = current_app.config["CELERY_BROKER_URL"]
# path to file to record task progress
# It will be used to retrieve working progress
# Maybe replace it with database later
task_progress_file = os.path.join('/output', _id, 'task_progress.txt')
try:
# Check if the upload is made from the client or server
if("feature_table" in request.files):
feature_table = request.files["feature_table"]
elif("feature_table" in request.form):
feature_table = request.form["feature_table"]
else:
raise FileNotFoundError("Feature table must be uploaded!")
if("rep_seqs" in request.files):
rep_seqs = request.files["rep_seqs"]
elif("rep_seqs" in request.form):
rep_seqs = request.form["rep_seqs"]
else:
raise FileNotFoundError("Representative sequences must be uploaded!")
if("taxonomy_qza" in request.files):
taxonomy_qza = request.files["taxonomy_qza"]
elif("taxonomy_qza" in request.form):
taxonomy_qza = request.form["taxonomy_qza"]
else:
raise FileNotFoundError("Taxonomy file must be uploaded!")
if("metadata" in request.files):
metadata = request.files["metadata"]
elif("metadata" in request.form):
metadata = request.form["metadata"]
else:
raise FileNotFoundError("Metadata must be uploaded!")
sampling_depth = request.form["sampling depth"]
n_cores = request.form["cores"]
feature_table_path, rep_seqs_path, taxonomy_path, metadata_path = analysis_helper.analysis_precheck(
_id=_id,
feature_table=feature_table,
rep_seqs=rep_seqs,
taxonomy=taxonomy_qza,
metadata=metadata,
)
        # Prepare necessary files for analysis
log_config_path = luigi_prep_helper.pipeline_setup(_id)
# Copy input file to premade output dir
analysis_helper.analysis_setup(_id, feature_table_path, rep_seqs_path, taxonomy_path)
task_kwargs = {
'_id': _id,
'logging_config': log_config_path,
'sampling_depth': sampling_depth,
'metadata_path': metadata_path,
'n_cores': n_cores,
'URL': URL,
'task_progress_file': task_progress_file,
'sender': sender,
'recipient': recipient
}
send_queue_email(_id, sender, recipient, "Analysis")
analysis_task.apply_async(kwargs=task_kwargs)
except AXIOME3Error as err:
current_app.logger.error(str(err))
return err.response
except FileNotFoundError as err:
current_app.logger.error(str(err))
return Response(str(err), status=400, mimetype='text/html')
except Exception as err:
current_app.logger.error(str(err))
return Response("Internal Server Error", status=500, mimetype='text/html')
return Response("Success!", status=200, mimetype='text/html')
@blueprint.route("/pcoa", methods=['POST'])
def pcoa():
# Use UUID4 for unique identifier
_id = str(request.form['uuid'])
URL = current_app.config["CELERY_BROKER_URL"]
# path to file to record task progress
# It will be used to retrieve working progress
# Maybe replace it with database later
task_progress_file = os.path.join('/output', _id, 'task_progress.txt')
try:
# Check if the upload is made from the client or server
if("pcoa_qza" in request.files):
pcoa_qza = request.files["pcoa_qza"]
elif("pcoa_qza" in request.form):
pcoa_qza = request.form["pcoa_qza"]
else:
raise FileNotFoundError("PCoA artifact must be uploaded!")
if("metadata" in request.files):
metadata = request.files["metadata"]
elif("metadata" in request.form):
metadata = request.form["metadata"]
else:
raise FileNotFoundError("Sample metadata must be uploaded!")
fill_variable = request.form["Fill variable"]
# Primary target must exist
if not(fill_variable):
return Response("Please specify `Fill variable`!", status=400, mimetype='text/html')
fill_variable_dtype = request.form["Fill variable data type"]
shape_variable = request.form["Shape variable"] if request.form["Shape variable"] else None
colour_set = request.form["Colour set"]
brewer_type = request.form["brewer type"]
alpha = request.form["alpha"]
stroke = request.form["stroke"]
point_size = request.form["point size"]
PC_axis_1 = request.form["PC axis 1"]
PC_axis_2 = request.form["PC axis 2"]
width = request.form["Width"]
height = request.form["Height"]
x_axis_text_size = request.form["x axis label size"]
y_axis_text_size = request.form["y axis label size"]
legend_title_size = request.form["legend title size"]
legend_text_size = request.form["legend text size"]
pcoa_path, metadata_path = extension_helper.validate_pcoa_input(
_id=_id,
pcoa_artifact_path=pcoa_qza,
metadata_path=metadata,
target_primary=fill_variable,
target_secondary=shape_variable
)
extension_helper.pcoa_setup(_id)
pcoa_kwargs = {
'pcoa': pcoa_path,
'metadata': metadata_path,
'fill_variable': fill_variable,
'fill_variable_dtype': fill_variable_dtype,
'shape_variable': shape_variable,
'colour_set': colour_set,
'brewer_type': brewer_type,
'alpha': float(alpha),
'stroke': float(stroke),
'point_size': float(point_size),
'PC_axis_1': int(PC_axis_1),
'PC_axis_2': int(PC_axis_2),
'width': width,
'height': height,
'x_axis_text_size': x_axis_text_size,
'y_axis_text_size': y_axis_text_size,
'legend_title_size': legend_title_size,
'legend_text_size': legend_text_size
}
pcoa_task.apply_async(args=[_id, URL, task_progress_file], kwargs=pcoa_kwargs)
except AXIOME3Error as err:
current_app.logger.error(str(err))
return err.response
except FileNotFoundError as err:
current_app.logger.error(str(err))
return Response(str(err), status=400, mimetype='text/html')
except Exception as err:
current_app.logger.error(str(err))
return Response("Internal Server Error", status=500, mimetype='text/html')
return Response("Success!", status=200, mimetype='text/html')
@blueprint.route("/bubbleplot", methods=['POST'])
def bubbleplot():
# Use UUID4 for unique identifier
_id = str(request.form['uuid'])
URL = current_app.config["CELERY_BROKER_URL"]
# path to file to record task progress
# It will be used to retrieve working progress
# Maybe replace it with database later
task_progress_file = os.path.join('/output', _id, 'task_progress.txt')
try:
# Check if the upload is made from the client or server
if("feature_table" in request.files):
feature_table_qza = request.files["feature_table"]
elif("feature_table" in request.form):
feature_table_qza = request.form["feature_table"]
else:
raise FileNotFoundError("Feature table must be uploaded!")
if("taxonomy_qza" in request.files):
taxonomy_qza = request.files["taxonomy_qza"]
elif("taxonomy_qza" in request.form):
taxonomy_qza = request.form["taxonomy_qza"]
else:
raise FileNotFoundError("Taxonomy artifact must be uploaded!")
# Optional metadata
if("metadata" in request.files):
metadata = request.files["metadata"]
elif("metadata" in request.form):
metadata = request.form["metadata"]
else:
metadata = None
taxa_level = request.form["Taxa collapse level"]
sort_level = request.form["Sort level"]
keyword_filter = request.form["Keyword filter"] if request.form["Keyword filter"] else None
fill_variable = request.form["Fill variable"] if (request.form["Fill variable"] and metadata is not None) else None
abundance_threshold = request.form["Abundance threshold"]
alpha = request.form["alpha"]
stroke = request.form["stroke"]
palette = request.form["Colour set"]
brewer_type = request.form["brewer type"]
width = request.form["Width"]
height = request.form["Height"]
feature_table_path, taxonomy_path, metadata_path = extension_helper.validate_bubbleplot_input(
_id=_id,
feature_table_artifact_path=feature_table_qza,
taxonomy_artifact_path=taxonomy_qza,
metadata_path=metadata,
fill_variable=fill_variable
)
extension_helper.bubbleplot_setup(_id)
bubbleplot_kwargs = {
'feature_table_artifact_path': feature_table_path,
'taxonomy_artifact_path': taxonomy_path,
'metadata_path': metadata_path,
'level': taxa_level,
'groupby_taxa': sort_level,
'abundance_threshold': float(abundance_threshold),
'keyword': keyword_filter,
'fill_variable': fill_variable,
'brewer_type': brewer_type,
'palette': palette,
'alpha': float(alpha),
'stroke': float(stroke),
'width': float(width),
'height': float(height)
}
bubbleplot_task.apply_async(args=[_id, URL, task_progress_file], kwargs=bubbleplot_kwargs)
except AXIOME3Error as err:
current_app.logger.error(str(err))
return err.response
except FileNotFoundError as err:
current_app.logger.error(str(err))
return Response(str(err), status=400, mimetype='text/html')
except Exception as err:
current_app.logger.error(str(err))
return Response("Internal Server Error", status=500, mimetype='text/html')
return Response("Success!", status=200, mimetype='text/html')
@blueprint.route("/triplot", methods=['POST'])
def triplot():
# Use UUID4 for unique identifier
_id = str(request.form['uuid'])
URL = current_app.config["CELERY_BROKER_URL"]
# path to file to record task progress
# It will be used to retrieve working progress
# Maybe replace it with database later
task_progress_file = os.path.join('/output', _id, 'task_progress.txt')
try:
# Check if the upload is made from the client or server
if("feature_table" in request.files):
feature_table_qza = request.files["feature_table"]
elif("feature_table" in request.form):
feature_table_qza = request.form["feature_table"]
else:
raise FileNotFoundError("Feature table must be uploaded!")
if("taxonomy_qza" in request.files):
taxonomy_qza = request.files["taxonomy_qza"]
elif("taxonomy_qza" in request.form):
taxonomy_qza = request.form["taxonomy_qza"]
else:
raise FileNotFoundError("Taxonomy artifact must be uploaded!")
if("metadata" in request.files):
metadata = request.files["metadata"]
elif("metadata" in request.form):
metadata = request.form["metadata"]
else:
raise FileNotFoundError("Sample metadata must be uploaded!")
if("environmental_metadata" in request.files):
environmental_metadata = request.files["environmental_metadata"]
elif("environmental_metadata" in request.form):
environmental_metadata = request.form["environmental_metadata"]
else:
raise FileNotFoundError("Environmental metadata must be uploaded!")
ordination_collapse_level = request.form["Ordination collapse level"]
weighted_average_collapse_level = request.form["Taxa weights collapse level"]
dissmilarity_index = request.form["Dissmilarity index"]
R2_threshold = request.form["R squared threshold"]
wa_threshold = request.form["Taxa weighted average threshold"]
pval_threshold = request.form["p-value threshold"]
fill_variable = request.form["Fill variable"]
# Fill variable must exist
if not(fill_variable):
return Response("Please specify fill variable!", status=400, mimetype='text/html')
fill_variable_dtype = request.form["Fill variable data type"]
colour_set = request.form["Colour set"]
brewer_type = request.form["brewer type"]
sampling_depth = request.form["Rarefaction depth"]
alpha = request.form["alpha"]
stroke = request.form["stroke"]
point_size = request.form["point size"]
PC_axis_1 = request.form["PC axis 1"]
PC_axis_2 = request.form["PC axis 2"]
width = request.form["Width"]
height = request.form["Height"]
x_axis_text_size = request.form["x axis label size"]
y_axis_text_size = request.form["y axis label size"]
taxa_text_size = request.form["taxa bubble text size"]
vector_arrow_text_size = request.form["vector arrow text size"]
legend_title_size = request.form["legend title size"]
legend_text_size = request.form["legend text size"]
feature_table_path, taxonomy_path, metadata_path, environmental_metadata_path = extension_helper.validate_triplot_input(
_id=_id,
feature_table_artifact_path=feature_table_qza,
taxonomy_artifact_path=taxonomy_qza,
metadata_path=metadata,
environmental_metadata_path=environmental_metadata,
fill_variable=fill_variable
)
extension_helper.triplot_setup(_id)
triplot_kwargs = {
'feature_table_artifact_path': feature_table_path,
'taxonomy_artifact_path': taxonomy_path,
'metadata_path': metadata_path,
'environmental_metadata_path': environmental_metadata_path,
'ordination_collapse_level': ordination_collapse_level,
'sampling_depth': int(sampling_depth),
'wascores_collapse_level': weighted_average_collapse_level,
'dissmilarity_index': dissmilarity_index,
'R2_threshold': float(R2_threshold),
'wa_threshold': float(wa_threshold),
'pval_threshold': float(pval_threshold),
'fill_variable': fill_variable,
'fill_variable_dtype': fill_variable_dtype,
'colour_set': colour_set,
'brewer_type': brewer_type,
'alpha': float(alpha),
'stroke': float(stroke),
'point_size': float(point_size),
'PC_axis_one': int(PC_axis_1),
'PC_axis_two': int(PC_axis_2),
'width': float(width),
'height': float(height),
'x_axis_text_size': x_axis_text_size,
'y_axis_text_size': y_axis_text_size,
'legend_title_size': legend_title_size,
'legend_text_size': legend_text_size,
'taxa_text_size': taxa_text_size,
'vector_arrow_text_size': vector_arrow_text_size
}
triplot_task.apply_async(args=[_id, URL, task_progress_file], kwargs=triplot_kwargs)
except AXIOME3Error as err:
current_app.logger.error(str(err))
return err.response
except FileNotFoundError as err:
current_app.logger.error(str(err))
return Response(str(err), status=400, mimetype='text/html')
except Exception as err:
current_app.logger.error(str(err))
return Response("Internal Server Error", status=500, mimetype='text/html')
return Response("Success!", status=200, mimetype='text/html')
avg_line_length: 32.890625 | max_line_length: 122 | alphanum_fraction: 0.745843

hexsha: 49555d1bd088095fa77b169753a272f7fd6bd0c9 | size: 5,624 | ext: py | lang: Python
repo path: homeassistant/components/deconz/alarm_control_panel.py | repo name: jlmaners/core | repo head hexsha: 9d016dd4346ec776da40f816764a5be441e34a3b | licenses: ["Apache-2.0"]
max_stars_count: null | max_issues_count: 25 (issues events 2021-10-02T10:01:14.000Z to 2022-03-31T06:11:49.000Z) | max_forks_count: 1 (forks events 2021-12-10T10:33:28.000Z to 2021-12-10T10:33:28.000Z)

content:
"""Support for deCONZ alarm control panel devices."""
from __future__ import annotations
from pydeconz.alarm_system import AlarmSystem
from pydeconz.sensor import (
ANCILLARY_CONTROL_ARMED_AWAY,
ANCILLARY_CONTROL_ARMED_NIGHT,
ANCILLARY_CONTROL_ARMED_STAY,
ANCILLARY_CONTROL_ARMING_AWAY,
ANCILLARY_CONTROL_ARMING_NIGHT,
ANCILLARY_CONTROL_ARMING_STAY,
ANCILLARY_CONTROL_DISARMED,
ANCILLARY_CONTROL_ENTRY_DELAY,
ANCILLARY_CONTROL_EXIT_DELAY,
ANCILLARY_CONTROL_IN_ALARM,
AncillaryControl,
)
from homeassistant.components.alarm_control_panel import (
DOMAIN,
AlarmControlPanelEntity,
AlarmControlPanelEntityFeature,
CodeFormat,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_ARMING,
STATE_ALARM_DISARMED,
STATE_ALARM_PENDING,
STATE_ALARM_TRIGGERED,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .deconz_device import DeconzDevice
from .gateway import DeconzGateway, get_gateway_from_config_entry
DECONZ_TO_ALARM_STATE = {
ANCILLARY_CONTROL_ARMED_AWAY: STATE_ALARM_ARMED_AWAY,
ANCILLARY_CONTROL_ARMED_NIGHT: STATE_ALARM_ARMED_NIGHT,
ANCILLARY_CONTROL_ARMED_STAY: STATE_ALARM_ARMED_HOME,
ANCILLARY_CONTROL_ARMING_AWAY: STATE_ALARM_ARMING,
ANCILLARY_CONTROL_ARMING_NIGHT: STATE_ALARM_ARMING,
ANCILLARY_CONTROL_ARMING_STAY: STATE_ALARM_ARMING,
ANCILLARY_CONTROL_DISARMED: STATE_ALARM_DISARMED,
ANCILLARY_CONTROL_ENTRY_DELAY: STATE_ALARM_PENDING,
ANCILLARY_CONTROL_EXIT_DELAY: STATE_ALARM_PENDING,
ANCILLARY_CONTROL_IN_ALARM: STATE_ALARM_TRIGGERED,
}
def get_alarm_system_for_unique_id(
gateway: DeconzGateway, unique_id: str
) -> AlarmSystem | None:
"""Retrieve alarm system unique ID is registered to."""
for alarm_system in gateway.api.alarmsystems.values():
if unique_id in alarm_system.devices:
return alarm_system
return None
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the deCONZ alarm control panel devices.
Alarm control panels are based on the same device class as sensors in deCONZ.
"""
gateway = get_gateway_from_config_entry(hass, config_entry)
gateway.entities[DOMAIN] = set()
@callback
def async_add_alarm_control_panel(
sensors: list[AncillaryControl] | None = None,
) -> None:
"""Add alarm control panel devices from deCONZ."""
entities = []
if sensors is None:
sensors = list(gateway.api.sensors.ancillary_control.values())
for sensor in sensors:
if (
isinstance(sensor, AncillaryControl)
and sensor.unique_id not in gateway.entities[DOMAIN]
and (
alarm_system := get_alarm_system_for_unique_id(
gateway, sensor.unique_id
)
)
is not None
):
entities.append(DeconzAlarmControlPanel(sensor, gateway, alarm_system))
if entities:
async_add_entities(entities)
config_entry.async_on_unload(
async_dispatcher_connect(
hass,
gateway.signal_new_sensor,
async_add_alarm_control_panel,
)
)
async_add_alarm_control_panel()
class DeconzAlarmControlPanel(DeconzDevice, AlarmControlPanelEntity):
"""Representation of a deCONZ alarm control panel."""
TYPE = DOMAIN
_device: AncillaryControl
_attr_code_format = CodeFormat.NUMBER
_attr_supported_features = (
AlarmControlPanelEntityFeature.ARM_AWAY
| AlarmControlPanelEntityFeature.ARM_HOME
| AlarmControlPanelEntityFeature.ARM_NIGHT
)
def __init__(
self,
device: AncillaryControl,
gateway: DeconzGateway,
alarm_system: AlarmSystem,
) -> None:
"""Set up alarm control panel device."""
super().__init__(device, gateway)
self.alarm_system = alarm_system
@callback
def async_update_callback(self) -> None:
"""Update the control panels state."""
keys = {"panel", "reachable"}
if (
self._device.changed_keys.intersection(keys)
and self._device.panel in DECONZ_TO_ALARM_STATE
):
super().async_update_callback()
@property
def state(self) -> str | None:
"""Return the state of the control panel."""
if self._device.panel in DECONZ_TO_ALARM_STATE:
return DECONZ_TO_ALARM_STATE[self._device.panel]
return None
async def async_alarm_arm_away(self, code: str | None = None) -> None:
"""Send arm away command."""
if code:
await self.alarm_system.arm_away(code)
async def async_alarm_arm_home(self, code: str | None = None) -> None:
"""Send arm home command."""
if code:
await self.alarm_system.arm_stay(code)
async def async_alarm_arm_night(self, code: str | None = None) -> None:
"""Send arm night command."""
if code:
await self.alarm_system.arm_night(code)
async def async_alarm_disarm(self, code: str | None = None) -> None:
"""Send disarm command."""
if code:
await self.alarm_system.disarm(code)
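# --- Usage note (added; not part of the original integration) ---
# Home Assistant routes the standard alarm_control_panel services to the
# async_alarm_* methods above; the panel code entered by the user is passed
# through to the deCONZ alarm system. An illustrative service call (the
# entity_id is a placeholder):
#
#   service: alarm_control_panel.alarm_arm_away
#   target:
#     entity_id: alarm_control_panel.keypad
#   data:
#     code: "1234"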
avg_line_length: 31.954545 | max_line_length: 87 | alphanum_fraction: 0.694346

hexsha: d0ea84c0083dcad37b3ab6f8b1881e32ae8c6268 | size: 34,346 | ext: py | lang: Python
repo path: pysnmp-with-texts/ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB.py | repo name: agustinhenze/mibs.snmplabs.com | repo head hexsha: 1fc5c07860542b89212f4c8ab807057d9a9206c7 | licenses: ["Apache-2.0"]
max_stars_count: 8 (stars events 2019-05-09T17:04:00.000Z to 2021-06-09T06:50:51.000Z) | max_issues_count: 4 (issues events 2019-05-31T16:42:59.000Z to 2020-01-31T21:57:17.000Z) | max_forks_count: 10 (forks events 2019-04-30T05:51:36.000Z to 2022-02-16T03:33:41.000Z)

content:
#
# PySNMP MIB module ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:14:57 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
adGenAOSConformance, adGenAOSMef = mibBuilder.importSymbols("ADTRAN-AOS", "adGenAOSConformance", "adGenAOSMef")
adIdentity, = mibBuilder.importSymbols("ADTRAN-MIB", "adIdentity")
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ValueSizeConstraint, SingleValueConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsUnion", "ConstraintsIntersection")
HCPerfInvalidIntervals, HCPerfTotalCount, HCPerfValidIntervals, HCPerfTimeElapsed, HCPerfIntervalCount, HCPerfCurrentCount = mibBuilder.importSymbols("HC-PerfHist-TC-MIB", "HCPerfInvalidIntervals", "HCPerfTotalCount", "HCPerfValidIntervals", "HCPerfTimeElapsed", "HCPerfIntervalCount", "HCPerfCurrentCount")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
NotificationGroup, ObjectGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ObjectGroup", "ModuleCompliance")
IpAddress, Gauge32, Counter32, Bits, TimeTicks, Counter64, Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, ModuleIdentity, iso, Unsigned32, ObjectIdentity, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "IpAddress", "Gauge32", "Counter32", "Bits", "TimeTicks", "Counter64", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "ModuleIdentity", "iso", "Unsigned32", "ObjectIdentity", "NotificationType")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
adGenAosMefPerCosPerUniPerfHistoryMib = ModuleIdentity((1, 3, 6, 1, 4, 1, 664, 6, 10000, 53, 9, 2))
adGenAosMefPerCosPerUniPerfHistoryMib.setRevisions(('2014-09-10 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: adGenAosMefPerCosPerUniPerfHistoryMib.setRevisionsDescriptions(('Initial version',))
if mibBuilder.loadTexts: adGenAosMefPerCosPerUniPerfHistoryMib.setLastUpdated('201409100000Z')
if mibBuilder.loadTexts: adGenAosMefPerCosPerUniPerfHistoryMib.setOrganization('ADTRAN Inc.')
if mibBuilder.loadTexts: adGenAosMefPerCosPerUniPerfHistoryMib.setContactInfo('Info: www.adtran.com Postal: ADTRAN, Inc. 901 Explorer Blvd. Huntsville, AL 35806 Tel: +1 888 423-8726 E-mail: support@adtran.com')
if mibBuilder.loadTexts: adGenAosMefPerCosPerUniPerfHistoryMib.setDescription('This MIB module defines high capacity performance statistics per COS per UNI within an AOS product. Copyright (C) ADTRAN, Inc. (2014).')
adGenAosMefPerCosPerUniPerfHistory = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2))
adMefPerCosPerUniPhCurTable = MibTable((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1), )
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurTable.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurTable.setDescription('This table contains current performance history information that has been recorded since the last 15 minute interval ended and from when the last 1 day interval ended. This table is indexed by ifIndex and adMefPerCosPerUniPhCurQueueNumber.')
adMefPerCosPerUniPhCurEntry = MibTableRow((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurQueueNumber"))
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurEntry.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurEntry.setDescription("This specifies the information contained in one entry of the adMefPerCosPerUniPhCurTable. It is indexed by an interface's ifIndex, and the queue number.")
adMefPerCosPerUniPhCurQueueNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 7)))
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurQueueNumber.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurQueueNumber.setDescription('UNI Interface queue number.')
adMefPerCosPerUniPhCurTimeElapsed15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 2), HCPerfTimeElapsed()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurTimeElapsed15Min.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurTimeElapsed15Min.setDescription('Total elapsed seconds in the current 15 minute interval.')
adMefPerCosPerUniPhCurValidIntervals15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 3), HCPerfValidIntervals()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurValidIntervals15Min.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurValidIntervals15Min.setDescription('Number of valid 15 minute intervals over the last 24 hours.')
adMefPerCosPerUniPhCurInvalidIntervals15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 4), HCPerfInvalidIntervals()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurInvalidIntervals15Min.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurInvalidIntervals15Min.setDescription('Number of invalid 15 minute intervals over the last 24 hours.')
adMefPerCosPerUniPhCurIngressGreenOctets15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 5), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurIngressGreenOctets15Min.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurIngressGreenOctets15Min.setDescription('Count of ingress green octets in the current 15 minute interval.')
adMefPerCosPerUniPhCurIngressGreenFrames15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 6), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurIngressGreenFrames15Min.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurIngressGreenFrames15Min.setDescription('Count of ingress green frames in the current 15 minute interval.')
adMefPerCosPerUniPhCurEgressGreenOctets15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 7), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurEgressGreenOctets15Min.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurEgressGreenOctets15Min.setDescription('Count of egress green octets in the current 15 minute interval.')
adMefPerCosPerUniPhCurEgressGreenFrames15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 8), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurEgressGreenFrames15Min.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurEgressGreenFrames15Min.setDescription('Count of egress green frames in the current 15 minute interval.')
adMefPerCosPerUniPhCurIngressGreenFrameDiscards15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 9), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurIngressGreenFrameDiscards15Min.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurIngressGreenFrameDiscards15Min.setDescription('Count of ingress green frames discarded in the current 15 minute interval.')
adMefPerCosPerUniPhCurEgressGreenFrameDiscards15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 10), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurEgressGreenFrameDiscards15Min.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurEgressGreenFrameDiscards15Min.setDescription('Count of egress green frames discarded in the current 15 minute interval.')
adMefPerCosPerUniPhCurIngressGreenOctetDiscards15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 11), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurIngressGreenOctetDiscards15Min.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurIngressGreenOctetDiscards15Min.setDescription('Count of ingress green octets discarded in the current 15 minute interval.')
adMefPerCosPerUniPhCurEgressGreenOctetDiscards15Min = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 12), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurEgressGreenOctetDiscards15Min.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurEgressGreenOctetDiscards15Min.setDescription('Count of egress green octets discarded in the current 15 minute interval.')
adMefPerCosPerUniPhCurTimeElapsed1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 13), HCPerfTimeElapsed()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurTimeElapsed1Day.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurTimeElapsed1Day.setDescription('Total elapsed seconds in the current 1 day interval.')
adMefPerCosPerUniPhCurValidIntervals1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 14), HCPerfValidIntervals()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurValidIntervals1Day.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurValidIntervals1Day.setDescription('Number of valid 1 day intervals available.')
adMefPerCosPerUniPhCurInvalidIntervals1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 15), HCPerfInvalidIntervals()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurInvalidIntervals1Day.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurInvalidIntervals1Day.setDescription('Number of invalid 1 day intervals available.')
adMefPerCosPerUniPhCurIngressGreenOctets1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 16), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurIngressGreenOctets1Day.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurIngressGreenOctets1Day.setDescription('Count of ingress green octets in the current 1 day interval.')
adMefPerCosPerUniPhCurIngressGreenFrames1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 17), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurIngressGreenFrames1Day.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurIngressGreenFrames1Day.setDescription('Count of ingress green frames in the current 1 day interval.')
adMefPerCosPerUniPhCurEgressGreenOctets1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 18), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurEgressGreenOctets1Day.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurEgressGreenOctets1Day.setDescription('Count of egress green octets in the current 1 day interval.')
adMefPerCosPerUniPhCurEgressGreenFrames1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 19), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurEgressGreenFrames1Day.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurEgressGreenFrames1Day.setDescription('Count of egress green frames in the current 1 day interval.')
adMefPerCosPerUniPhCurIngressGreenFrameDiscards1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 20), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurIngressGreenFrameDiscards1Day.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurIngressGreenFrameDiscards1Day.setDescription('Count of ingress green frames discarded in the current 1 day interval.')
adMefPerCosPerUniPhCurEgressGreenFrameDiscards1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 21), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurEgressGreenFrameDiscards1Day.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurEgressGreenFrameDiscards1Day.setDescription('Count of egress green frames discarded in the current 1 day interval.')
adMefPerCosPerUniPhCurIngressGreenOctetDiscards1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 22), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurIngressGreenOctetDiscards1Day.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurIngressGreenOctetDiscards1Day.setDescription('Count of ingress green octets discarded in the current 1 day interval.')
adMefPerCosPerUniPhCurEgressGreenOctetDiscards1Day = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 1, 1, 23), HCPerfCurrentCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurEgressGreenOctetDiscards1Day.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurEgressGreenOctetDiscards1Day.setDescription('Count of egress green octets discarded in the current 1 day interval.')
adMefPerCosPerUniPh15MinIntervalTable = MibTable((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 2), )
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinIntervalTable.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinIntervalTable.setDescription('This table contains performance history information for each valid 15 minute interval. This table is indexed by ifIndex, the queue number, and the interval number.')
adMefPerCosPerUniPh15MinIntervalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh15MinQueueNumber"), (0, "ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh15MinIntervalNumber"))
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinIntervalEntry.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinIntervalEntry.setDescription('An entry in the adMefPerCosPerUniPh15MinIntervalTable.')
adMefPerCosPerUniPh15MinQueueNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 2, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 7)))
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinQueueNumber.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinQueueNumber.setDescription('UNI Interface queue number.')
adMefPerCosPerUniPh15MinIntervalNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 96)))
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinIntervalNumber.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinIntervalNumber.setDescription('Performance history interval number. Interval 1 is the most recent previous interval; interval 96 is 24 hours ago. Intervals 2..96 are optional.')
adMefPerCosPerUniPh15MinIngressGreenOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 2, 1, 3), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinIngressGreenOctets.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinIngressGreenOctets.setDescription('Count of ingress green octets in the 15 minute interval.')
adMefPerCosPerUniPh15MinIngressGreenFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 2, 1, 4), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinIngressGreenFrames.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinIngressGreenFrames.setDescription('Count of ingress green frames in the 15 minute interval.')
adMefPerCosPerUniPh15MinEgressGreenOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 2, 1, 5), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinEgressGreenOctets.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinEgressGreenOctets.setDescription('Count of egress green octets in the 15 minute interval.')
adMefPerCosPerUniPh15MinEgressGreenFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 2, 1, 6), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinEgressGreenFrames.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinEgressGreenFrames.setDescription('Count of egress green frames in the 15 minute interval.')
adMefPerCosPerUniPh15MinIngressGreenFrameDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 2, 1, 7), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinIngressGreenFrameDiscards.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinIngressGreenFrameDiscards.setDescription('Count of ingress green frames discarded in the 15 minute interval.')
adMefPerCosPerUniPh15MinEgressGreenFrameDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 2, 1, 8), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinEgressGreenFrameDiscards.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinEgressGreenFrameDiscards.setDescription('Count of egress green frames discarded in the 15 minute interval.')
adMefPerCosPerUniPh15MinIngressGreenOctetDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 2, 1, 9), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinIngressGreenOctetDiscards.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinIngressGreenOctetDiscards.setDescription('Count of ingress green octets discarded in the 15 minute interval.')
adMefPerCosPerUniPh15MinEgressGreenOctetDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 2, 1, 10), HCPerfIntervalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinEgressGreenOctetDiscards.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinEgressGreenOctetDiscards.setDescription('Count of egress green octets discarded in the 15 minute interval.')
adMefPerCosPerUniPh1DayIntervalTable = MibTable((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 3), )
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayIntervalTable.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayIntervalTable.setDescription('This table contains performance history information for each valid 1 day interval. This table is indexed by ifIndex, the queue number, and the interval number.')
adMefPerCosPerUniPh1DayIntervalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 3, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh1DayQueueNumber"), (0, "ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh1DayIntervalNumber"))
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayIntervalEntry.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayIntervalEntry.setDescription('An entry in the adMefPerCosPerUniPh1DayIntervalTable.')
adMefPerCosPerUniPh1DayQueueNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 3, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 7)))
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayQueueNumber.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayQueueNumber.setDescription('UNI Interface queue number.')
adMefPerCosPerUniPh1DayIntervalNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 30)))
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayIntervalNumber.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayIntervalNumber.setDescription('Performance history interval number. Interval 1 is the most recent previous day; interval 30 is 30 days ago. Intervals 2..30 are optional.')
adMefPerCosPerUniPh1DayIngressGreenOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 3, 1, 3), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayIngressGreenOctets.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayIngressGreenOctets.setDescription('Count of ingress green octets in the 1 day interval.')
adMefPerCosPerUniPh1DayIngressGreenFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 3, 1, 4), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayIngressGreenFrames.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayIngressGreenFrames.setDescription('Count of ingress green frames in the 1 day interval.')
adMefPerCosPerUniPh1DayEgressGreenOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 3, 1, 5), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayEgressGreenOctets.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayEgressGreenOctets.setDescription('Count of egress green octets in the 1 day interval.')
adMefPerCosPerUniPh1DayEgressGreenFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 3, 1, 6), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayEgressGreenFrames.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayEgressGreenFrames.setDescription('Count of egress green frames in the 1 day interval.')
adMefPerCosPerUniPh1DayIngressGreenFrameDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 3, 1, 7), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayIngressGreenFrameDiscards.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayIngressGreenFrameDiscards.setDescription('Count of ingress green frames discarded in the 1 day interval.')
adMefPerCosPerUniPh1DayEgressGreenFrameDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 3, 1, 8), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayEgressGreenFrameDiscards.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayEgressGreenFrameDiscards.setDescription('Count of egress green frames discarded in the 1 day interval.')
adMefPerCosPerUniPh1DayIngressGreenOctetDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 3, 1, 9), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayIngressGreenOctetDiscards.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayIngressGreenOctetDiscards.setDescription('Count of ingress green octets discarded in the 1 day interval.')
adMefPerCosPerUniPh1DayEgressGreenOctetDiscards = MibTableColumn((1, 3, 6, 1, 4, 1, 664, 5, 53, 9, 2, 3, 1, 10), HCPerfTotalCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayEgressGreenOctetDiscards.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayEgressGreenOctetDiscards.setDescription('Count of egress green octets discarded in the 1 day interval.')
adGenAosMefPerCosPerUniPerfHistoryConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 5, 53, 99, 21))
adGenAosMefPerCosPerUniPerfHistoryGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 5, 53, 99, 21, 1))
adGenAosMefPerCosPerUniPerfHistoryCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 5, 53, 99, 21, 2))
adGenAosMefPerCosPerUniPerfHistoryCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 664, 5, 53, 99, 21, 2, 1)).setObjects(("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurGroup"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh15MinIntervalGroup"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh1DayIntervalGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
adGenAosMefPerCosPerUniPerfHistoryCompliance = adGenAosMefPerCosPerUniPerfHistoryCompliance.setStatus('current')
if mibBuilder.loadTexts: adGenAosMefPerCosPerUniPerfHistoryCompliance.setDescription('The compliance statement for SNMPv2 entities which implement UNI interface per-queue performance history.')
adMefPerCosPerUniPhCurGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 664, 5, 53, 99, 21, 1, 1)).setObjects(("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurTimeElapsed15Min"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurValidIntervals15Min"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurInvalidIntervals15Min"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurIngressGreenOctets15Min"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurIngressGreenFrames15Min"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurEgressGreenOctets15Min"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurEgressGreenFrames15Min"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurIngressGreenFrameDiscards15Min"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurEgressGreenFrameDiscards15Min"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurIngressGreenOctetDiscards15Min"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurEgressGreenOctetDiscards15Min"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurTimeElapsed1Day"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurValidIntervals1Day"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurInvalidIntervals1Day"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurIngressGreenOctets1Day"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurIngressGreenFrames1Day"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurEgressGreenOctets1Day"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurEgressGreenFrames1Day"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurIngressGreenFrameDiscards1Day"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurEgressGreenFrameDiscards1Day"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurIngressGreenOctetDiscards1Day"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPhCurEgressGreenOctetDiscards1Day"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
adMefPerCosPerUniPhCurGroup = adMefPerCosPerUniPhCurGroup.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPhCurGroup.setDescription('The Current Group.')
adMefPerCosPerUniPh15MinIntervalGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 664, 5, 53, 99, 21, 1, 2)).setObjects(("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh15MinIngressGreenOctets"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh15MinIngressGreenFrames"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh15MinEgressGreenOctets"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh15MinEgressGreenFrames"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh15MinIngressGreenFrameDiscards"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh15MinEgressGreenFrameDiscards"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh15MinIngressGreenOctetDiscards"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh15MinEgressGreenOctetDiscards"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
adMefPerCosPerUniPh15MinIntervalGroup = adMefPerCosPerUniPh15MinIntervalGroup.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh15MinIntervalGroup.setDescription('The 15 minute interval group.')
adMefPerCosPerUniPh1DayIntervalGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 664, 5, 53, 99, 21, 1, 3)).setObjects(("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh1DayIngressGreenOctets"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh1DayIngressGreenFrames"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh1DayEgressGreenOctets"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh1DayEgressGreenFrames"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh1DayIngressGreenFrameDiscards"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh1DayEgressGreenFrameDiscards"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh1DayIngressGreenOctetDiscards"), ("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", "adMefPerCosPerUniPh1DayEgressGreenOctetDiscards"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
adMefPerCosPerUniPh1DayIntervalGroup = adMefPerCosPerUniPh1DayIntervalGroup.setStatus('current')
if mibBuilder.loadTexts: adMefPerCosPerUniPh1DayIntervalGroup.setDescription('The 1 day interval group.')
mibBuilder.exportSymbols("ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB", adMefPerCosPerUniPh15MinEgressGreenOctetDiscards=adMefPerCosPerUniPh15MinEgressGreenOctetDiscards, adMefPerCosPerUniPhCurEgressGreenFrameDiscards1Day=adMefPerCosPerUniPhCurEgressGreenFrameDiscards1Day, adMefPerCosPerUniPhCurIngressGreenOctets1Day=adMefPerCosPerUniPhCurIngressGreenOctets1Day, adMefPerCosPerUniPhCurGroup=adMefPerCosPerUniPhCurGroup, adMefPerCosPerUniPh15MinIntervalGroup=adMefPerCosPerUniPh15MinIntervalGroup, adMefPerCosPerUniPhCurEgressGreenOctets15Min=adMefPerCosPerUniPhCurEgressGreenOctets15Min, adMefPerCosPerUniPh1DayIngressGreenOctetDiscards=adMefPerCosPerUniPh1DayIngressGreenOctetDiscards, adMefPerCosPerUniPh15MinIngressGreenFrameDiscards=adMefPerCosPerUniPh15MinIngressGreenFrameDiscards, adMefPerCosPerUniPh15MinIntervalTable=adMefPerCosPerUniPh15MinIntervalTable, adMefPerCosPerUniPh1DayQueueNumber=adMefPerCosPerUniPh1DayQueueNumber, adMefPerCosPerUniPhCurValidIntervals1Day=adMefPerCosPerUniPhCurValidIntervals1Day, adGenAosMefPerCosPerUniPerfHistoryCompliance=adGenAosMefPerCosPerUniPerfHistoryCompliance, adMefPerCosPerUniPhCurEgressGreenFrames15Min=adMefPerCosPerUniPhCurEgressGreenFrames15Min, adMefPerCosPerUniPhCurTimeElapsed1Day=adMefPerCosPerUniPhCurTimeElapsed1Day, adMefPerCosPerUniPhCurValidIntervals15Min=adMefPerCosPerUniPhCurValidIntervals15Min, adMefPerCosPerUniPh1DayEgressGreenOctets=adMefPerCosPerUniPh1DayEgressGreenOctets, adMefPerCosPerUniPh1DayIntervalEntry=adMefPerCosPerUniPh1DayIntervalEntry, adMefPerCosPerUniPh1DayEgressGreenFrameDiscards=adMefPerCosPerUniPh1DayEgressGreenFrameDiscards, adMefPerCosPerUniPhCurEgressGreenOctetDiscards1Day=adMefPerCosPerUniPhCurEgressGreenOctetDiscards1Day, adGenAosMefPerCosPerUniPerfHistory=adGenAosMefPerCosPerUniPerfHistory, adMefPerCosPerUniPhCurIngressGreenOctetDiscards1Day=adMefPerCosPerUniPhCurIngressGreenOctetDiscards1Day, adMefPerCosPerUniPhCurIngressGreenFrameDiscards15Min=adMefPerCosPerUniPhCurIngressGreenFrameDiscards15Min, adMefPerCosPerUniPh15MinIngressGreenOctetDiscards=adMefPerCosPerUniPh15MinIngressGreenOctetDiscards, adMefPerCosPerUniPhCurIngressGreenFrames1Day=adMefPerCosPerUniPhCurIngressGreenFrames1Day, adMefPerCosPerUniPhCurEntry=adMefPerCosPerUniPhCurEntry, adMefPerCosPerUniPh1DayIntervalNumber=adMefPerCosPerUniPh1DayIntervalNumber, adGenAosMefPerCosPerUniPerfHistoryGroups=adGenAosMefPerCosPerUniPerfHistoryGroups, adMefPerCosPerUniPhCurEgressGreenFrames1Day=adMefPerCosPerUniPhCurEgressGreenFrames1Day, adMefPerCosPerUniPh1DayIntervalGroup=adMefPerCosPerUniPh1DayIntervalGroup, adMefPerCosPerUniPh15MinIntervalNumber=adMefPerCosPerUniPh15MinIntervalNumber, adMefPerCosPerUniPh15MinQueueNumber=adMefPerCosPerUniPh15MinQueueNumber, adGenAosMefPerCosPerUniPerfHistoryConformance=adGenAosMefPerCosPerUniPerfHistoryConformance, adMefPerCosPerUniPh15MinIngressGreenOctets=adMefPerCosPerUniPh15MinIngressGreenOctets, adMefPerCosPerUniPhCurEgressGreenOctets1Day=adMefPerCosPerUniPhCurEgressGreenOctets1Day, adMefPerCosPerUniPhCurIngressGreenFrameDiscards1Day=adMefPerCosPerUniPhCurIngressGreenFrameDiscards1Day, adMefPerCosPerUniPhCurTable=adMefPerCosPerUniPhCurTable, adMefPerCosPerUniPhCurIngressGreenOctetDiscards15Min=adMefPerCosPerUniPhCurIngressGreenOctetDiscards15Min, adMefPerCosPerUniPh1DayIngressGreenFrameDiscards=adMefPerCosPerUniPh1DayIngressGreenFrameDiscards, adMefPerCosPerUniPh15MinEgressGreenFrames=adMefPerCosPerUniPh15MinEgressGreenFrames, adMefPerCosPerUniPh1DayIntervalTable=adMefPerCosPerUniPh1DayIntervalTable, 
adMefPerCosPerUniPhCurEgressGreenFrameDiscards15Min=adMefPerCosPerUniPhCurEgressGreenFrameDiscards15Min, adMefPerCosPerUniPh15MinEgressGreenOctets=adMefPerCosPerUniPh15MinEgressGreenOctets, adMefPerCosPerUniPh1DayEgressGreenFrames=adMefPerCosPerUniPh1DayEgressGreenFrames, adMefPerCosPerUniPhCurTimeElapsed15Min=adMefPerCosPerUniPhCurTimeElapsed15Min, adMefPerCosPerUniPhCurInvalidIntervals15Min=adMefPerCosPerUniPhCurInvalidIntervals15Min, adMefPerCosPerUniPh1DayEgressGreenOctetDiscards=adMefPerCosPerUniPh1DayEgressGreenOctetDiscards, adMefPerCosPerUniPh15MinIngressGreenFrames=adMefPerCosPerUniPh15MinIngressGreenFrames, adMefPerCosPerUniPh1DayIngressGreenOctets=adMefPerCosPerUniPh1DayIngressGreenOctets, adMefPerCosPerUniPh1DayIngressGreenFrames=adMefPerCosPerUniPh1DayIngressGreenFrames, adMefPerCosPerUniPh15MinIntervalEntry=adMefPerCosPerUniPh15MinIntervalEntry, adMefPerCosPerUniPhCurEgressGreenOctetDiscards15Min=adMefPerCosPerUniPhCurEgressGreenOctetDiscards15Min, adMefPerCosPerUniPhCurIngressGreenFrames15Min=adMefPerCosPerUniPhCurIngressGreenFrames15Min, PYSNMP_MODULE_ID=adGenAosMefPerCosPerUniPerfHistoryMib, adGenAosMefPerCosPerUniPerfHistoryMib=adGenAosMefPerCosPerUniPerfHistoryMib, adMefPerCosPerUniPhCurIngressGreenOctets15Min=adMefPerCosPerUniPhCurIngressGreenOctets15Min, adMefPerCosPerUniPh15MinEgressGreenFrameDiscards=adMefPerCosPerUniPh15MinEgressGreenFrameDiscards, adMefPerCosPerUniPhCurInvalidIntervals1Day=adMefPerCosPerUniPhCurInvalidIntervals1Day, adMefPerCosPerUniPhCurQueueNumber=adMefPerCosPerUniPhCurQueueNumber, adGenAosMefPerCosPerUniPerfHistoryCompliances=adGenAosMefPerCosPerUniPerfHistoryCompliances)
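# ---------------------------------------------------------------------------
# Editor's illustrative sketch (not part of the generated MIB module above).
# It shows one way the current-interval counters defined in this module might
# be walked with pysnmp's high-level API. The agent address '198.51.100.10',
# the community string 'public', and the assumption that this compiled module
# is already on pysnmp's MIB search path are all hypothetical.
def _example_walk_current_ingress_green_octets(address='198.51.100.10', community='public'):
    """Walk adMefPerCosPerUniPhCurIngressGreenOctets15Min for every ifIndex/queue pair."""
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, nextCmd)
    column = ObjectType(ObjectIdentity(
        'ADTRAN-MEF-PER-COS-PER-UNI-PERF-HISTORY-MIB',
        'adMefPerCosPerUniPhCurIngressGreenOctets15Min'))
    for errorIndication, errorStatus, errorIndex, varBinds in nextCmd(
            SnmpEngine(), CommunityData(community),
            UdpTransportTarget((address, 161)), ContextData(),
            column, lexicographicMode=False):
        if errorIndication or errorStatus:
            break  # stop on the first SNMP error
        for varBind in varBinds:
            # each varBind is indexed by ifIndex and queue number, per the table definition
            print(varBind.prettyPrint())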
| avg_line_length: 175.234694 | max_line_length: 5171 | alphanum_fraction: 0.830897 |
hexsha: 315e02522b31b8427a8825b26fa4a86383d84926 | size: 117207 | ext: py | lang: Python
max_stars_repo_path: sympy/printing/tests/test_latex.py | max_stars_repo_name: skieffer/sympy | max_stars_repo_head_hexsha: 23ab5c14881aef21409918939e0c8b78b7fcb06f | max_stars_repo_licenses: ["BSD-3-Clause"] | max_stars_count: 1 | max_stars_repo_stars_event_min_datetime: 2021-09-21T11:50:43.000Z | max_stars_repo_stars_event_max_datetime: 2021-09-21T11:50:43.000Z
max_issues_repo_path: sympy/printing/tests/test_latex.py | max_issues_repo_name: Nbede/sympy | max_issues_repo_head_hexsha: 2ead5bfe4716c36e1c0839c25aa6dc87cd85b0f2 | max_issues_repo_licenses: ["BSD-3-Clause"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: sympy/printing/tests/test_latex.py | max_forks_repo_name: Nbede/sympy | max_forks_repo_head_hexsha: 2ead5bfe4716c36e1c0839c25aa6dc87cd85b0f2 | max_forks_repo_licenses: ["BSD-3-Clause"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
from sympy.tensor.array.expressions.array_expressions import ArraySymbol, ArrayElement
from sympy.tensor.toperators import PartialDerivative
from sympy import (
Abs, Chi, Ci, CosineTransform, Dict, Ei, Eq, FallingFactorial,
FiniteSet, Float, FourierTransform, Function, Indexed, IndexedBase, Integral,
Interval, InverseCosineTransform, InverseFourierTransform, Derivative,
InverseLaplaceTransform, InverseMellinTransform, InverseSineTransform,
Lambda, LaplaceTransform, Limit, Matrix, Max, MellinTransform, Min, Mul,
Order, Piecewise, Poly, ring, field, ZZ, Pow, Product, Range, Rational, Integer,
RisingFactorial, rootof, RootSum, S, Shi, Si, SineTransform, Subs,
Sum, Symbol, ImageSet, Tuple, Ynm, Znm, arg, asin, acsc, asinh, Mod,
assoc_laguerre, assoc_legendre, beta, binomial, catalan, ceiling,
chebyshevt, chebyshevu, conjugate, cot, coth, diff, dirichlet_eta, euler,
exp, expint, factorial, factorial2, floor, gamma, gegenbauer, hermite,
hyper, im, jacobi, laguerre, legendre, lerchphi, log, frac,
meijerg, oo, polar_lift, polylog, re, root, sin, sqrt, symbols,
uppergamma, zeta, subfactorial, totient, elliptic_k, elliptic_f,
elliptic_e, elliptic_pi, cos, tan, Wild, true, false, Equivalent, Not,
Contains, divisor_sigma, SeqPer, SeqFormula, MatrixSlice,
SeqAdd, SeqMul, fourier_series, pi, ConditionSet, ComplexRegion, fps,
AccumBounds, reduced_totient, primenu, primeomega, SingularityFunction,
stieltjes, mathieuc, mathieus, mathieucprime, mathieusprime,
UnevaluatedExpr, Quaternion, I, KroneckerProduct, LambertW)
from sympy.ntheory.factor_ import udivisor_sigma
from sympy.abc import mu, tau
from sympy.printing.latex import (latex, translate, greek_letters_set,
tex_greek_dictionary, multiline_latex,
latex_escape, LatexPrinter)
from sympy.tensor.array import (ImmutableDenseNDimArray,
ImmutableSparseNDimArray,
MutableSparseNDimArray,
MutableDenseNDimArray,
tensorproduct)
from sympy.testing.pytest import XFAIL, raises, _both_exp_pow
from sympy.functions import DiracDelta, Heaviside, KroneckerDelta, LeviCivita
from sympy.functions.combinatorial.numbers import bernoulli, bell, lucas, \
fibonacci, tribonacci
from sympy.logic import Implies
from sympy.logic.boolalg import And, Or, Xor
from sympy.physics.control.lti import TransferFunction, Series, Parallel, \
Feedback, TransferFunctionMatrix, MIMOSeries, MIMOParallel, MIMOFeedback
from sympy.physics.quantum import Commutator, Operator
from sympy.physics.units import meter, gibibyte, microgram, second
from sympy.core.trace import Tr
from sympy.combinatorics.permutations import \
Cycle, Permutation, AppliedPermutation
from sympy.matrices.expressions.permutation import PermutationMatrix
from sympy import MatrixSymbol, ln
from sympy.vector import CoordSys3D, Cross, Curl, Dot, Divergence, Gradient, Laplacian
from sympy.sets.setexpr import SetExpr
from sympy.sets.sets import \
Union, Intersection, Complement, SymmetricDifference, ProductSet
import sympy as sym
class lowergamma(sym.lowergamma):
pass # testing notation inheritance by a subclass with same name
x, y, z, t, w, a, b, c, s, p = symbols('x y z t w a b c s p')
k, m, n = symbols('k m n', integer=True)
def test_printmethod():
class R(Abs):
def _latex(self, printer):
return "foo(%s)" % printer._print(self.args[0])
assert latex(R(x)) == r"foo(x)"
class R(Abs):
def _latex(self, printer):
return "foo"
assert latex(R(x)) == r"foo"
def test_latex_basic():
assert latex(1 + x) == r"x + 1"
assert latex(x**2) == r"x^{2}"
assert latex(x**(1 + x)) == r"x^{x + 1}"
assert latex(x**3 + x + 1 + x**2) == r"x^{3} + x^{2} + x + 1"
assert latex(2*x*y) == r"2 x y"
assert latex(2*x*y, mul_symbol='dot') == r"2 \cdot x \cdot y"
assert latex(3*x**2*y, mul_symbol='\\,') == r"3\,x^{2}\,y"
assert latex(1.5*3**x, mul_symbol='\\,') == r"1.5 \cdot 3^{x}"
assert latex(Mul(0, 1, evaluate=False)) == r'0 \cdot 1'
assert latex(Mul(1, 0, evaluate=False)) == r'1 \cdot 0'
assert latex(Mul(1, 1, evaluate=False)) == r'1 \cdot 1'
assert latex(Mul(-1, 1, evaluate=False)) == r'\left(-1\right) 1'
assert latex(Mul(1, 1, 1, evaluate=False)) == r'1 \cdot 1 \cdot 1'
assert latex(Mul(1, 2, evaluate=False)) == r'1 \cdot 2'
assert latex(Mul(1, S.Half, evaluate=False)) == r'1 \frac{1}{2}'
assert latex(Mul(1, 1, S.Half, evaluate=False)) == \
r'1 \cdot 1 \frac{1}{2}'
assert latex(Mul(1, 1, 2, 3, x, evaluate=False)) == \
r'1 \cdot 1 \cdot 2 \cdot 3 x'
assert latex(Mul(1, -1, evaluate=False)) == r'1 \left(-1\right)'
assert latex(Mul(4, 3, 2, 1, 0, y, x, evaluate=False)) == \
r'4 \cdot 3 \cdot 2 \cdot 1 \cdot 0 y x'
assert latex(Mul(4, 3, 2, 1+z, 0, y, x, evaluate=False)) == \
r'4 \cdot 3 \cdot 2 \left(z + 1\right) 0 y x'
assert latex(Mul(Rational(2, 3), Rational(5, 7), evaluate=False)) == \
r'\frac{2}{3} \frac{5}{7}'
assert latex(1/x) == r"\frac{1}{x}"
assert latex(1/x, fold_short_frac=True) == r"1 / x"
assert latex(-S(3)/2) == r"- \frac{3}{2}"
assert latex(-S(3)/2, fold_short_frac=True) == r"- 3 / 2"
assert latex(1/x**2) == r"\frac{1}{x^{2}}"
assert latex(1/(x + y)/2) == r"\frac{1}{2 \left(x + y\right)}"
assert latex(x/2) == r"\frac{x}{2}"
assert latex(x/2, fold_short_frac=True) == r"x / 2"
assert latex((x + y)/(2*x)) == r"\frac{x + y}{2 x}"
assert latex((x + y)/(2*x), fold_short_frac=True) == \
r"\left(x + y\right) / 2 x"
assert latex((x + y)/(2*x), long_frac_ratio=0) == \
r"\frac{1}{2 x} \left(x + y\right)"
assert latex((x + y)/x) == r"\frac{x + y}{x}"
assert latex((x + y)/x, long_frac_ratio=3) == r"\frac{x + y}{x}"
assert latex((2*sqrt(2)*x)/3) == r"\frac{2 \sqrt{2} x}{3}"
assert latex((2*sqrt(2)*x)/3, long_frac_ratio=2) == \
r"\frac{2 x}{3} \sqrt{2}"
assert latex(binomial(x, y)) == r"{\binom{x}{y}}"
x_star = Symbol('x^*')
f = Function('f')
assert latex(x_star**2) == r"\left(x^{*}\right)^{2}"
assert latex(x_star**2, parenthesize_super=False) == r"{x^{*}}^{2}"
assert latex(Derivative(f(x_star), x_star,2)) == r"\frac{d^{2}}{d \left(x^{*}\right)^{2}} f{\left(x^{*} \right)}"
assert latex(Derivative(f(x_star), x_star,2), parenthesize_super=False) == r"\frac{d^{2}}{d {x^{*}}^{2}} f{\left(x^{*} \right)}"
assert latex(2*Integral(x, x)/3) == r"\frac{2 \int x\, dx}{3}"
assert latex(2*Integral(x, x)/3, fold_short_frac=True) == \
r"\left(2 \int x\, dx\right) / 3"
assert latex(sqrt(x)) == r"\sqrt{x}"
assert latex(x**Rational(1, 3)) == r"\sqrt[3]{x}"
assert latex(x**Rational(1, 3), root_notation=False) == r"x^{\frac{1}{3}}"
assert latex(sqrt(x)**3) == r"x^{\frac{3}{2}}"
assert latex(sqrt(x), itex=True) == r"\sqrt{x}"
assert latex(x**Rational(1, 3), itex=True) == r"\root{3}{x}"
assert latex(sqrt(x)**3, itex=True) == r"x^{\frac{3}{2}}"
assert latex(x**Rational(3, 4)) == r"x^{\frac{3}{4}}"
assert latex(x**Rational(3, 4), fold_frac_powers=True) == r"x^{3/4}"
assert latex((x + 1)**Rational(3, 4)) == \
r"\left(x + 1\right)^{\frac{3}{4}}"
assert latex((x + 1)**Rational(3, 4), fold_frac_powers=True) == \
r"\left(x + 1\right)^{3/4}"
assert latex(1.5e20*x) == r"1.5 \cdot 10^{20} x"
assert latex(1.5e20*x, mul_symbol='dot') == r"1.5 \cdot 10^{20} \cdot x"
assert latex(1.5e20*x, mul_symbol='times') == \
r"1.5 \times 10^{20} \times x"
assert latex(1/sin(x)) == r"\frac{1}{\sin{\left(x \right)}}"
assert latex(sin(x)**-1) == r"\frac{1}{\sin{\left(x \right)}}"
assert latex(sin(x)**Rational(3, 2)) == \
r"\sin^{\frac{3}{2}}{\left(x \right)}"
assert latex(sin(x)**Rational(3, 2), fold_frac_powers=True) == \
r"\sin^{3/2}{\left(x \right)}"
assert latex(~x) == r"\neg x"
assert latex(x & y) == r"x \wedge y"
assert latex(x & y & z) == r"x \wedge y \wedge z"
assert latex(x | y) == r"x \vee y"
assert latex(x | y | z) == r"x \vee y \vee z"
assert latex((x & y) | z) == r"z \vee \left(x \wedge y\right)"
assert latex(Implies(x, y)) == r"x \Rightarrow y"
assert latex(~(x >> ~y)) == r"x \not\Rightarrow \neg y"
assert latex(Implies(Or(x,y), z)) == r"\left(x \vee y\right) \Rightarrow z"
assert latex(Implies(z, Or(x,y))) == r"z \Rightarrow \left(x \vee y\right)"
assert latex(~(x & y)) == r"\neg \left(x \wedge y\right)"
assert latex(~x, symbol_names={x: "x_i"}) == r"\neg x_i"
assert latex(x & y, symbol_names={x: "x_i", y: "y_i"}) == \
r"x_i \wedge y_i"
assert latex(x & y & z, symbol_names={x: "x_i", y: "y_i", z: "z_i"}) == \
r"x_i \wedge y_i \wedge z_i"
assert latex(x | y, symbol_names={x: "x_i", y: "y_i"}) == r"x_i \vee y_i"
assert latex(x | y | z, symbol_names={x: "x_i", y: "y_i", z: "z_i"}) == \
r"x_i \vee y_i \vee z_i"
assert latex((x & y) | z, symbol_names={x: "x_i", y: "y_i", z: "z_i"}) == \
r"z_i \vee \left(x_i \wedge y_i\right)"
assert latex(Implies(x, y), symbol_names={x: "x_i", y: "y_i"}) == \
r"x_i \Rightarrow y_i"
assert latex(Pow(Rational(1, 3), -1, evaluate=False)) == r"\frac{1}{\frac{1}{3}}"
assert latex(Pow(Rational(1, 3), -2, evaluate=False)) == r"\frac{1}{(\frac{1}{3})^{2}}"
assert latex(Pow(Integer(1)/100, -1, evaluate=False)) == r"\frac{1}{\frac{1}{100}}"
p = Symbol('p', positive=True)
assert latex(exp(-p)*log(p)) == r"e^{- p} \log{\left(p \right)}"
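# Editor's illustrative sketch (not part of the original test file): the
# keyword arguments exercised in test_latex_basic map onto LatexPrinter
# settings, so an equivalent printer can be configured once and reused.
# The helper name below is hypothetical and exists only for illustration.
def _example_latex_printer_settings():
    from sympy.printing.latex import LatexPrinter
    printer = LatexPrinter({'mul_symbol': 'dot', 'fold_short_frac': True})
    # doprint() applies the stored settings, equivalent to latex(expr, **settings)
    return printer.doprint(2*x*y), printer.doprint(x/2)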
def test_latex_builtins():
assert latex(True) == r"\text{True}"
assert latex(False) == r"\text{False}"
assert latex(None) == r"\text{None}"
assert latex(true) == r"\text{True}"
assert latex(false) == r'\text{False}'
def test_latex_SingularityFunction():
assert latex(SingularityFunction(x, 4, 5)) == \
r"{\left\langle x - 4 \right\rangle}^{5}"
assert latex(SingularityFunction(x, -3, 4)) == \
r"{\left\langle x + 3 \right\rangle}^{4}"
assert latex(SingularityFunction(x, 0, 4)) == \
r"{\left\langle x \right\rangle}^{4}"
assert latex(SingularityFunction(x, a, n)) == \
r"{\left\langle - a + x \right\rangle}^{n}"
assert latex(SingularityFunction(x, 4, -2)) == \
r"{\left\langle x - 4 \right\rangle}^{-2}"
assert latex(SingularityFunction(x, 4, -1)) == \
r"{\left\langle x - 4 \right\rangle}^{-1}"
assert latex(SingularityFunction(x, 4, 5)**3) == \
r"{\left({\langle x - 4 \rangle}^{5}\right)}^{3}"
assert latex(SingularityFunction(x, -3, 4)**3) == \
r"{\left({\langle x + 3 \rangle}^{4}\right)}^{3}"
assert latex(SingularityFunction(x, 0, 4)**3) == \
r"{\left({\langle x \rangle}^{4}\right)}^{3}"
assert latex(SingularityFunction(x, a, n)**3) == \
r"{\left({\langle - a + x \rangle}^{n}\right)}^{3}"
assert latex(SingularityFunction(x, 4, -2)**3) == \
r"{\left({\langle x - 4 \rangle}^{-2}\right)}^{3}"
assert latex((SingularityFunction(x, 4, -1)**3)**3) == \
r"{\left({\langle x - 4 \rangle}^{-1}\right)}^{9}"
def test_latex_cycle():
assert latex(Cycle(1, 2, 4)) == r"\left( 1\; 2\; 4\right)"
assert latex(Cycle(1, 2)(4, 5, 6)) == \
r"\left( 1\; 2\right)\left( 4\; 5\; 6\right)"
assert latex(Cycle()) == r"\left( \right)"
def test_latex_permutation():
assert latex(Permutation(1, 2, 4)) == r"\left( 1\; 2\; 4\right)"
assert latex(Permutation(1, 2)(4, 5, 6)) == \
r"\left( 1\; 2\right)\left( 4\; 5\; 6\right)"
assert latex(Permutation()) == r"\left( \right)"
assert latex(Permutation(2, 4)*Permutation(5)) == \
r"\left( 2\; 4\right)\left( 5\right)"
assert latex(Permutation(5)) == r"\left( 5\right)"
assert latex(Permutation(0, 1), perm_cyclic=False) == \
r"\begin{pmatrix} 0 & 1 \\ 1 & 0 \end{pmatrix}"
assert latex(Permutation(0, 1)(2, 3), perm_cyclic=False) == \
r"\begin{pmatrix} 0 & 1 & 2 & 3 \\ 1 & 0 & 3 & 2 \end{pmatrix}"
assert latex(Permutation(), perm_cyclic=False) == \
r"\left( \right)"
def test_latex_Float():
assert latex(Float(1.0e100)) == r"1.0 \cdot 10^{100}"
assert latex(Float(1.0e-100)) == r"1.0 \cdot 10^{-100}"
assert latex(Float(1.0e-100), mul_symbol="times") == \
r"1.0 \times 10^{-100}"
assert latex(Float('10000.0'), full_prec=False, min=-2, max=2) == \
r"1.0 \cdot 10^{4}"
assert latex(Float('10000.0'), full_prec=False, min=-2, max=4) == \
r"1.0 \cdot 10^{4}"
assert latex(Float('10000.0'), full_prec=False, min=-2, max=5) == \
r"10000.0"
assert latex(Float('0.099999'), full_prec=True, min=-2, max=5) == \
r"9.99990000000000 \cdot 10^{-2}"
def test_latex_vector_expressions():
A = CoordSys3D('A')
assert latex(Cross(A.i, A.j*A.x*3+A.k)) == \
r"\mathbf{\hat{i}_{A}} \times \left((3 \mathbf{{x}_{A}})\mathbf{\hat{j}_{A}} + \mathbf{\hat{k}_{A}}\right)"
assert latex(Cross(A.i, A.j)) == \
r"\mathbf{\hat{i}_{A}} \times \mathbf{\hat{j}_{A}}"
assert latex(x*Cross(A.i, A.j)) == \
r"x \left(\mathbf{\hat{i}_{A}} \times \mathbf{\hat{j}_{A}}\right)"
assert latex(Cross(x*A.i, A.j)) == \
r'- \mathbf{\hat{j}_{A}} \times \left((x)\mathbf{\hat{i}_{A}}\right)'
assert latex(Curl(3*A.x*A.j)) == \
r"\nabla\times \left((3 \mathbf{{x}_{A}})\mathbf{\hat{j}_{A}}\right)"
assert latex(Curl(3*A.x*A.j+A.i)) == \
r"\nabla\times \left(\mathbf{\hat{i}_{A}} + (3 \mathbf{{x}_{A}})\mathbf{\hat{j}_{A}}\right)"
assert latex(Curl(3*x*A.x*A.j)) == \
r"\nabla\times \left((3 \mathbf{{x}_{A}} x)\mathbf{\hat{j}_{A}}\right)"
assert latex(x*Curl(3*A.x*A.j)) == \
r"x \left(\nabla\times \left((3 \mathbf{{x}_{A}})\mathbf{\hat{j}_{A}}\right)\right)"
assert latex(Divergence(3*A.x*A.j+A.i)) == \
r"\nabla\cdot \left(\mathbf{\hat{i}_{A}} + (3 \mathbf{{x}_{A}})\mathbf{\hat{j}_{A}}\right)"
assert latex(Divergence(3*A.x*A.j)) == \
r"\nabla\cdot \left((3 \mathbf{{x}_{A}})\mathbf{\hat{j}_{A}}\right)"
assert latex(x*Divergence(3*A.x*A.j)) == \
r"x \left(\nabla\cdot \left((3 \mathbf{{x}_{A}})\mathbf{\hat{j}_{A}}\right)\right)"
assert latex(Dot(A.i, A.j*A.x*3+A.k)) == \
r"\mathbf{\hat{i}_{A}} \cdot \left((3 \mathbf{{x}_{A}})\mathbf{\hat{j}_{A}} + \mathbf{\hat{k}_{A}}\right)"
assert latex(Dot(A.i, A.j)) == \
r"\mathbf{\hat{i}_{A}} \cdot \mathbf{\hat{j}_{A}}"
assert latex(Dot(x*A.i, A.j)) == \
r"\mathbf{\hat{j}_{A}} \cdot \left((x)\mathbf{\hat{i}_{A}}\right)"
assert latex(x*Dot(A.i, A.j)) == \
r"x \left(\mathbf{\hat{i}_{A}} \cdot \mathbf{\hat{j}_{A}}\right)"
assert latex(Gradient(A.x)) == r"\nabla \mathbf{{x}_{A}}"
assert latex(Gradient(A.x + 3*A.y)) == \
r"\nabla \left(\mathbf{{x}_{A}} + 3 \mathbf{{y}_{A}}\right)"
assert latex(x*Gradient(A.x)) == r"x \left(\nabla \mathbf{{x}_{A}}\right)"
assert latex(Gradient(x*A.x)) == r"\nabla \left(\mathbf{{x}_{A}} x\right)"
assert latex(Laplacian(A.x)) == r"\triangle \mathbf{{x}_{A}}"
assert latex(Laplacian(A.x + 3*A.y)) == \
r"\triangle \left(\mathbf{{x}_{A}} + 3 \mathbf{{y}_{A}}\right)"
assert latex(x*Laplacian(A.x)) == r"x \left(\triangle \mathbf{{x}_{A}}\right)"
assert latex(Laplacian(x*A.x)) == r"\triangle \left(\mathbf{{x}_{A}} x\right)"
def test_latex_symbols():
Gamma, lmbda, rho = symbols('Gamma, lambda, rho')
tau, Tau, TAU, taU = symbols('tau, Tau, TAU, taU')
assert latex(tau) == r"\tau"
assert latex(Tau) == r"T"
assert latex(TAU) == r"\tau"
assert latex(taU) == r"\tau"
# Check that all capitalized greek letters are handled explicitly
capitalized_letters = {l.capitalize() for l in greek_letters_set}
assert len(capitalized_letters - set(tex_greek_dictionary.keys())) == 0
assert latex(Gamma + lmbda) == r"\Gamma + \lambda"
assert latex(Gamma * lmbda) == r"\Gamma \lambda"
assert latex(Symbol('q1')) == r"q_{1}"
assert latex(Symbol('q21')) == r"q_{21}"
assert latex(Symbol('epsilon0')) == r"\epsilon_{0}"
assert latex(Symbol('omega1')) == r"\omega_{1}"
assert latex(Symbol('91')) == r"91"
assert latex(Symbol('alpha_new')) == r"\alpha_{new}"
assert latex(Symbol('C^orig')) == r"C^{orig}"
assert latex(Symbol('x^alpha')) == r"x^{\alpha}"
assert latex(Symbol('beta^alpha')) == r"\beta^{\alpha}"
assert latex(Symbol('e^Alpha')) == r"e^{A}"
assert latex(Symbol('omega_alpha^beta')) == r"\omega^{\beta}_{\alpha}"
assert latex(Symbol('omega') ** Symbol('beta')) == r"\omega^{\beta}"
@XFAIL
def test_latex_symbols_failing():
rho, mass, volume = symbols('rho, mass, volume')
assert latex(
volume * rho == mass) == r"\rho \mathrm{volume} = \mathrm{mass}"
assert latex(volume / mass * rho == 1) == \
r"\rho \mathrm{volume} {\mathrm{mass}}^{(-1)} = 1"
assert latex(mass**3 * volume**3) == \
r"{\mathrm{mass}}^{3} \cdot {\mathrm{volume}}^{3}"
@_both_exp_pow
def test_latex_functions():
assert latex(exp(x)) == r"e^{x}"
assert latex(exp(1) + exp(2)) == r"e + e^{2}"
f = Function('f')
assert latex(f(x)) == r'f{\left(x \right)}'
assert latex(f) == r'f'
g = Function('g')
assert latex(g(x, y)) == r'g{\left(x,y \right)}'
assert latex(g) == r'g'
h = Function('h')
assert latex(h(x, y, z)) == r'h{\left(x,y,z \right)}'
assert latex(h) == r'h'
Li = Function('Li')
assert latex(Li) == r'\operatorname{Li}'
assert latex(Li(x)) == r'\operatorname{Li}{\left(x \right)}'
mybeta = Function('beta')
# not to be confused with the beta function
assert latex(mybeta(x, y, z)) == r"\beta{\left(x,y,z \right)}"
assert latex(beta(x, y)) == r'\operatorname{B}\left(x, y\right)'
assert latex(beta(x, y)**2) == r'\operatorname{B}^{2}\left(x, y\right)'
assert latex(mybeta(x)) == r"\beta{\left(x \right)}"
assert latex(mybeta) == r"\beta"
g = Function('gamma')
# not to be confused with the gamma function
assert latex(g(x, y, z)) == r"\gamma{\left(x,y,z \right)}"
assert latex(g(x)) == r"\gamma{\left(x \right)}"
assert latex(g) == r"\gamma"
a1 = Function('a_1')
assert latex(a1) == r"\operatorname{a_{1}}"
assert latex(a1(x)) == r"\operatorname{a_{1}}{\left(x \right)}"
# issue 5868
omega1 = Function('omega1')
assert latex(omega1) == r"\omega_{1}"
assert latex(omega1(x)) == r"\omega_{1}{\left(x \right)}"
assert latex(sin(x)) == r"\sin{\left(x \right)}"
assert latex(sin(x), fold_func_brackets=True) == r"\sin {x}"
assert latex(sin(2*x**2), fold_func_brackets=True) == \
r"\sin {2 x^{2}}"
assert latex(sin(x**2), fold_func_brackets=True) == \
r"\sin {x^{2}}"
assert latex(asin(x)**2) == r"\operatorname{asin}^{2}{\left(x \right)}"
assert latex(asin(x)**2, inv_trig_style="full") == \
r"\arcsin^{2}{\left(x \right)}"
assert latex(asin(x)**2, inv_trig_style="power") == \
r"\sin^{-1}{\left(x \right)}^{2}"
assert latex(asin(x**2), inv_trig_style="power",
fold_func_brackets=True) == \
r"\sin^{-1} {x^{2}}"
assert latex(acsc(x), inv_trig_style="full") == \
r"\operatorname{arccsc}{\left(x \right)}"
assert latex(asinh(x), inv_trig_style="full") == \
r"\operatorname{arcsinh}{\left(x \right)}"
assert latex(factorial(k)) == r"k!"
assert latex(factorial(-k)) == r"\left(- k\right)!"
assert latex(factorial(k)**2) == r"k!^{2}"
assert latex(subfactorial(k)) == r"!k"
assert latex(subfactorial(-k)) == r"!\left(- k\right)"
assert latex(subfactorial(k)**2) == r"\left(!k\right)^{2}"
assert latex(factorial2(k)) == r"k!!"
assert latex(factorial2(-k)) == r"\left(- k\right)!!"
assert latex(factorial2(k)**2) == r"k!!^{2}"
assert latex(binomial(2, k)) == r"{\binom{2}{k}}"
assert latex(binomial(2, k)**2) == r"{\binom{2}{k}}^{2}"
assert latex(FallingFactorial(3, k)) == r"{\left(3\right)}_{k}"
assert latex(RisingFactorial(3, k)) == r"{3}^{\left(k\right)}"
assert latex(floor(x)) == r"\left\lfloor{x}\right\rfloor"
assert latex(ceiling(x)) == r"\left\lceil{x}\right\rceil"
assert latex(frac(x)) == r"\operatorname{frac}{\left(x\right)}"
assert latex(floor(x)**2) == r"\left\lfloor{x}\right\rfloor^{2}"
assert latex(ceiling(x)**2) == r"\left\lceil{x}\right\rceil^{2}"
assert latex(frac(x)**2) == r"\operatorname{frac}{\left(x\right)}^{2}"
assert latex(Min(x, 2, x**3)) == r"\min\left(2, x, x^{3}\right)"
assert latex(Min(x, y)**2) == r"\min\left(x, y\right)^{2}"
assert latex(Max(x, 2, x**3)) == r"\max\left(2, x, x^{3}\right)"
assert latex(Max(x, y)**2) == r"\max\left(x, y\right)^{2}"
assert latex(Abs(x)) == r"\left|{x}\right|"
assert latex(Abs(x)**2) == r"\left|{x}\right|^{2}"
assert latex(re(x)) == r"\operatorname{re}{\left(x\right)}"
assert latex(re(x + y)) == \
r"\operatorname{re}{\left(x\right)} + \operatorname{re}{\left(y\right)}"
assert latex(im(x)) == r"\operatorname{im}{\left(x\right)}"
assert latex(conjugate(x)) == r"\overline{x}"
assert latex(conjugate(x)**2) == r"\overline{x}^{2}"
assert latex(conjugate(x**2)) == r"\overline{x}^{2}"
assert latex(gamma(x)) == r"\Gamma\left(x\right)"
w = Wild('w')
assert latex(gamma(w)) == r"\Gamma\left(w\right)"
assert latex(Order(x)) == r"O\left(x\right)"
assert latex(Order(x, x)) == r"O\left(x\right)"
assert latex(Order(x, (x, 0))) == r"O\left(x\right)"
assert latex(Order(x, (x, oo))) == r"O\left(x; x\rightarrow \infty\right)"
assert latex(Order(x - y, (x, y))) == \
r"O\left(x - y; x\rightarrow y\right)"
assert latex(Order(x, x, y)) == \
r"O\left(x; \left( x, \ y\right)\rightarrow \left( 0, \ 0\right)\right)"
assert latex(Order(x, x, y)) == \
r"O\left(x; \left( x, \ y\right)\rightarrow \left( 0, \ 0\right)\right)"
assert latex(Order(x, (x, oo), (y, oo))) == \
r"O\left(x; \left( x, \ y\right)\rightarrow \left( \infty, \ \infty\right)\right)"
assert latex(lowergamma(x, y)) == r'\gamma\left(x, y\right)'
assert latex(lowergamma(x, y)**2) == r'\gamma^{2}\left(x, y\right)'
assert latex(uppergamma(x, y)) == r'\Gamma\left(x, y\right)'
assert latex(uppergamma(x, y)**2) == r'\Gamma^{2}\left(x, y\right)'
assert latex(cot(x)) == r'\cot{\left(x \right)}'
assert latex(coth(x)) == r'\coth{\left(x \right)}'
assert latex(re(x)) == r'\operatorname{re}{\left(x\right)}'
assert latex(im(x)) == r'\operatorname{im}{\left(x\right)}'
assert latex(root(x, y)) == r'x^{\frac{1}{y}}'
assert latex(arg(x)) == r'\arg{\left(x \right)}'
assert latex(zeta(x)) == r"\zeta\left(x\right)"
assert latex(zeta(x)**2) == r"\zeta^{2}\left(x\right)"
assert latex(zeta(x, y)) == r"\zeta\left(x, y\right)"
assert latex(zeta(x, y)**2) == r"\zeta^{2}\left(x, y\right)"
assert latex(dirichlet_eta(x)) == r"\eta\left(x\right)"
assert latex(dirichlet_eta(x)**2) == r"\eta^{2}\left(x\right)"
assert latex(polylog(x, y)) == r"\operatorname{Li}_{x}\left(y\right)"
assert latex(
polylog(x, y)**2) == r"\operatorname{Li}_{x}^{2}\left(y\right)"
assert latex(lerchphi(x, y, n)) == r"\Phi\left(x, y, n\right)"
assert latex(lerchphi(x, y, n)**2) == r"\Phi^{2}\left(x, y, n\right)"
assert latex(stieltjes(x)) == r"\gamma_{x}"
assert latex(stieltjes(x)**2) == r"\gamma_{x}^{2}"
assert latex(stieltjes(x, y)) == r"\gamma_{x}\left(y\right)"
assert latex(stieltjes(x, y)**2) == r"\gamma_{x}\left(y\right)^{2}"
assert latex(elliptic_k(z)) == r"K\left(z\right)"
assert latex(elliptic_k(z)**2) == r"K^{2}\left(z\right)"
assert latex(elliptic_f(x, y)) == r"F\left(x\middle| y\right)"
assert latex(elliptic_f(x, y)**2) == r"F^{2}\left(x\middle| y\right)"
assert latex(elliptic_e(x, y)) == r"E\left(x\middle| y\right)"
assert latex(elliptic_e(x, y)**2) == r"E^{2}\left(x\middle| y\right)"
assert latex(elliptic_e(z)) == r"E\left(z\right)"
assert latex(elliptic_e(z)**2) == r"E^{2}\left(z\right)"
assert latex(elliptic_pi(x, y, z)) == r"\Pi\left(x; y\middle| z\right)"
assert latex(elliptic_pi(x, y, z)**2) == \
r"\Pi^{2}\left(x; y\middle| z\right)"
assert latex(elliptic_pi(x, y)) == r"\Pi\left(x\middle| y\right)"
assert latex(elliptic_pi(x, y)**2) == r"\Pi^{2}\left(x\middle| y\right)"
assert latex(Ei(x)) == r'\operatorname{Ei}{\left(x \right)}'
assert latex(Ei(x)**2) == r'\operatorname{Ei}^{2}{\left(x \right)}'
assert latex(expint(x, y)) == r'\operatorname{E}_{x}\left(y\right)'
assert latex(expint(x, y)**2) == r'\operatorname{E}_{x}^{2}\left(y\right)'
assert latex(Shi(x)**2) == r'\operatorname{Shi}^{2}{\left(x \right)}'
assert latex(Si(x)**2) == r'\operatorname{Si}^{2}{\left(x \right)}'
assert latex(Ci(x)**2) == r'\operatorname{Ci}^{2}{\left(x \right)}'
assert latex(Chi(x)**2) == r'\operatorname{Chi}^{2}\left(x\right)'
assert latex(Chi(x)) == r'\operatorname{Chi}\left(x\right)'
assert latex(jacobi(n, a, b, x)) == \
r'P_{n}^{\left(a,b\right)}\left(x\right)'
assert latex(jacobi(n, a, b, x)**2) == \
r'\left(P_{n}^{\left(a,b\right)}\left(x\right)\right)^{2}'
assert latex(gegenbauer(n, a, x)) == \
r'C_{n}^{\left(a\right)}\left(x\right)'
assert latex(gegenbauer(n, a, x)**2) == \
r'\left(C_{n}^{\left(a\right)}\left(x\right)\right)^{2}'
assert latex(chebyshevt(n, x)) == r'T_{n}\left(x\right)'
assert latex(chebyshevt(n, x)**2) == \
r'\left(T_{n}\left(x\right)\right)^{2}'
assert latex(chebyshevu(n, x)) == r'U_{n}\left(x\right)'
assert latex(chebyshevu(n, x)**2) == \
r'\left(U_{n}\left(x\right)\right)^{2}'
assert latex(legendre(n, x)) == r'P_{n}\left(x\right)'
assert latex(legendre(n, x)**2) == r'\left(P_{n}\left(x\right)\right)^{2}'
assert latex(assoc_legendre(n, a, x)) == \
r'P_{n}^{\left(a\right)}\left(x\right)'
assert latex(assoc_legendre(n, a, x)**2) == \
r'\left(P_{n}^{\left(a\right)}\left(x\right)\right)^{2}'
assert latex(laguerre(n, x)) == r'L_{n}\left(x\right)'
assert latex(laguerre(n, x)**2) == r'\left(L_{n}\left(x\right)\right)^{2}'
assert latex(assoc_laguerre(n, a, x)) == \
r'L_{n}^{\left(a\right)}\left(x\right)'
assert latex(assoc_laguerre(n, a, x)**2) == \
r'\left(L_{n}^{\left(a\right)}\left(x\right)\right)^{2}'
assert latex(hermite(n, x)) == r'H_{n}\left(x\right)'
assert latex(hermite(n, x)**2) == r'\left(H_{n}\left(x\right)\right)^{2}'
theta = Symbol("theta", real=True)
phi = Symbol("phi", real=True)
assert latex(Ynm(n, m, theta, phi)) == r'Y_{n}^{m}\left(\theta,\phi\right)'
assert latex(Ynm(n, m, theta, phi)**3) == \
r'\left(Y_{n}^{m}\left(\theta,\phi\right)\right)^{3}'
assert latex(Znm(n, m, theta, phi)) == r'Z_{n}^{m}\left(\theta,\phi\right)'
assert latex(Znm(n, m, theta, phi)**3) == \
r'\left(Z_{n}^{m}\left(\theta,\phi\right)\right)^{3}'
# Test latex printing of function names with "_"
assert latex(polar_lift(0)) == \
r"\operatorname{polar\_lift}{\left(0 \right)}"
assert latex(polar_lift(0)**3) == \
r"\operatorname{polar\_lift}^{3}{\left(0 \right)}"
assert latex(totient(n)) == r'\phi\left(n\right)'
assert latex(totient(n) ** 2) == r'\left(\phi\left(n\right)\right)^{2}'
assert latex(reduced_totient(n)) == r'\lambda\left(n\right)'
assert latex(reduced_totient(n) ** 2) == \
r'\left(\lambda\left(n\right)\right)^{2}'
assert latex(divisor_sigma(x)) == r"\sigma\left(x\right)"
assert latex(divisor_sigma(x)**2) == r"\sigma^{2}\left(x\right)"
assert latex(divisor_sigma(x, y)) == r"\sigma_y\left(x\right)"
assert latex(divisor_sigma(x, y)**2) == r"\sigma^{2}_y\left(x\right)"
assert latex(udivisor_sigma(x)) == r"\sigma^*\left(x\right)"
assert latex(udivisor_sigma(x)**2) == r"\sigma^*^{2}\left(x\right)"
assert latex(udivisor_sigma(x, y)) == r"\sigma^*_y\left(x\right)"
assert latex(udivisor_sigma(x, y)**2) == r"\sigma^*^{2}_y\left(x\right)"
assert latex(primenu(n)) == r'\nu\left(n\right)'
assert latex(primenu(n) ** 2) == r'\left(\nu\left(n\right)\right)^{2}'
assert latex(primeomega(n)) == r'\Omega\left(n\right)'
assert latex(primeomega(n) ** 2) == \
r'\left(\Omega\left(n\right)\right)^{2}'
assert latex(LambertW(n)) == r'W\left(n\right)'
assert latex(LambertW(n, -1)) == r'W_{-1}\left(n\right)'
assert latex(LambertW(n, k)) == r'W_{k}\left(n\right)'
assert latex(LambertW(n) * LambertW(n)) == r"W^{2}\left(n\right)"
assert latex(Pow(LambertW(n), 2)) == r"W^{2}\left(n\right)"
assert latex(LambertW(n)**k) == r"W^{k}\left(n\right)"
assert latex(LambertW(n, k)**p) == r"W^{p}_{k}\left(n\right)"
assert latex(Mod(x, 7)) == r'x \bmod 7'
assert latex(Mod(x + 1, 7)) == r'\left(x + 1\right) \bmod 7'
assert latex(Mod(7, x + 1)) == r'7 \bmod \left(x + 1\right)'
assert latex(Mod(2 * x, 7)) == r'2 x \bmod 7'
assert latex(Mod(7, 2 * x)) == r'7 \bmod 2 x'
assert latex(Mod(x, 7) + 1) == r'\left(x \bmod 7\right) + 1'
assert latex(2 * Mod(x, 7)) == r'2 \left(x \bmod 7\right)'
assert latex(Mod(7, 2 * x)**n) == r'\left(7 \bmod 2 x\right)^{n}'
# some unknown function name should get rendered with \operatorname
fjlkd = Function('fjlkd')
assert latex(fjlkd(x)) == r'\operatorname{fjlkd}{\left(x \right)}'
# even when it is referred to without an argument
assert latex(fjlkd) == r'\operatorname{fjlkd}'
# test that notation passes to subclasses of the same name only
def test_function_subclass_different_name():
class mygamma(gamma):
pass
assert latex(mygamma) == r"\operatorname{mygamma}"
assert latex(mygamma(x)) == r"\operatorname{mygamma}{\left(x \right)}"
def test_hyper_printing():
from sympy.abc import x, z
assert latex(meijerg(Tuple(pi, pi, x), Tuple(1),
(0, 1), Tuple(1, 2, 3/pi), z)) == \
r'{G_{4, 5}^{2, 3}\left(\begin{matrix} \pi, \pi, x & 1 \\0, 1 & 1, 2, '\
r'\frac{3}{\pi} \end{matrix} \middle| {z} \right)}'
assert latex(meijerg(Tuple(), Tuple(1), (0,), Tuple(), z)) == \
r'{G_{1, 1}^{1, 0}\left(\begin{matrix} & 1 \\0 & \end{matrix} \middle| {z} \right)}'
assert latex(hyper((x, 2), (3,), z)) == \
r'{{}_{2}F_{1}\left(\begin{matrix} x, 2 ' \
r'\\ 3 \end{matrix}\middle| {z} \right)}'
assert latex(hyper(Tuple(), Tuple(1), z)) == \
r'{{}_{0}F_{1}\left(\begin{matrix} ' \
r'\\ 1 \end{matrix}\middle| {z} \right)}'
def test_latex_bessel():
from sympy.functions.special.bessel import (besselj, bessely, besseli,
besselk, hankel1, hankel2,
jn, yn, hn1, hn2)
from sympy.abc import z
assert latex(besselj(n, z**2)**k) == r'J^{k}_{n}\left(z^{2}\right)'
assert latex(bessely(n, z)) == r'Y_{n}\left(z\right)'
assert latex(besseli(n, z)) == r'I_{n}\left(z\right)'
assert latex(besselk(n, z)) == r'K_{n}\left(z\right)'
assert latex(hankel1(n, z**2)**2) == \
r'\left(H^{(1)}_{n}\left(z^{2}\right)\right)^{2}'
assert latex(hankel2(n, z)) == r'H^{(2)}_{n}\left(z\right)'
assert latex(jn(n, z)) == r'j_{n}\left(z\right)'
assert latex(yn(n, z)) == r'y_{n}\left(z\right)'
assert latex(hn1(n, z)) == r'h^{(1)}_{n}\left(z\right)'
assert latex(hn2(n, z)) == r'h^{(2)}_{n}\left(z\right)'
def test_latex_fresnel():
from sympy.functions.special.error_functions import (fresnels, fresnelc)
from sympy.abc import z
assert latex(fresnels(z)) == r'S\left(z\right)'
assert latex(fresnelc(z)) == r'C\left(z\right)'
assert latex(fresnels(z)**2) == r'S^{2}\left(z\right)'
assert latex(fresnelc(z)**2) == r'C^{2}\left(z\right)'
def test_latex_brackets():
assert latex((-1)**x) == r"\left(-1\right)^{x}"
def test_latex_indexed():
Psi_symbol = Symbol('Psi_0', complex=True, real=False)
Psi_indexed = IndexedBase(Symbol('Psi', complex=True, real=False))
symbol_latex = latex(Psi_symbol * conjugate(Psi_symbol))
indexed_latex = latex(Psi_indexed[0] * conjugate(Psi_indexed[0]))
# \\overline{{\\Psi}_{0}} {\\Psi}_{0} vs. \\Psi_{0} \\overline{\\Psi_{0}}
assert symbol_latex == r'\Psi_{0} \overline{\Psi_{0}}'
assert indexed_latex == r'\overline{{\Psi}_{0}} {\Psi}_{0}'
# Symbol('gamma') gives r'\gamma'
assert latex(Indexed('x1', Symbol('i'))) == r'{x_{1}}_{i}'
assert latex(IndexedBase('gamma')) == r'\gamma'
assert latex(IndexedBase('a b')) == r'a b'
assert latex(IndexedBase('a_b')) == r'a_{b}'
def test_latex_derivatives():
# regular "d" for ordinary derivatives
assert latex(diff(x**3, x, evaluate=False)) == \
r"\frac{d}{d x} x^{3}"
assert latex(diff(sin(x) + x**2, x, evaluate=False)) == \
r"\frac{d}{d x} \left(x^{2} + \sin{\left(x \right)}\right)"
assert latex(diff(diff(sin(x) + x**2, x, evaluate=False), evaluate=False))\
== \
r"\frac{d^{2}}{d x^{2}} \left(x^{2} + \sin{\left(x \right)}\right)"
assert latex(diff(diff(diff(sin(x) + x**2, x, evaluate=False), evaluate=False), evaluate=False)) == \
r"\frac{d^{3}}{d x^{3}} \left(x^{2} + \sin{\left(x \right)}\right)"
# \partial for partial derivatives
assert latex(diff(sin(x * y), x, evaluate=False)) == \
r"\frac{\partial}{\partial x} \sin{\left(x y \right)}"
assert latex(diff(sin(x * y) + x**2, x, evaluate=False)) == \
r"\frac{\partial}{\partial x} \left(x^{2} + \sin{\left(x y \right)}\right)"
assert latex(diff(diff(sin(x*y) + x**2, x, evaluate=False), x, evaluate=False)) == \
r"\frac{\partial^{2}}{\partial x^{2}} \left(x^{2} + \sin{\left(x y \right)}\right)"
assert latex(diff(diff(diff(sin(x*y) + x**2, x, evaluate=False), x, evaluate=False), x, evaluate=False)) == \
r"\frac{\partial^{3}}{\partial x^{3}} \left(x^{2} + \sin{\left(x y \right)}\right)"
# mixed partial derivatives
f = Function("f")
assert latex(diff(diff(f(x, y), x, evaluate=False), y, evaluate=False)) == \
r"\frac{\partial^{2}}{\partial y\partial x} " + latex(f(x, y))
assert latex(diff(diff(diff(f(x, y), x, evaluate=False), x, evaluate=False), y, evaluate=False)) == \
r"\frac{\partial^{3}}{\partial y\partial x^{2}} " + latex(f(x, y))
# for negative nested Derivative
assert latex(diff(-diff(y**2,x,evaluate=False),x,evaluate=False)) == r'\frac{d}{d x} \left(- \frac{d}{d x} y^{2}\right)'
assert latex(diff(diff(-diff(diff(y,x,evaluate=False),x,evaluate=False),x,evaluate=False),x,evaluate=False)) == \
r'\frac{d^{2}}{d x^{2}} \left(- \frac{d^{2}}{d x^{2}} y\right)'
# use ordinary d when one of the variables has been integrated out
assert latex(diff(Integral(exp(-x*y), (x, 0, oo)), y, evaluate=False)) == \
r"\frac{d}{d y} \int\limits_{0}^{\infty} e^{- x y}\, dx"
# Derivative wrapped in power:
assert latex(diff(x, x, evaluate=False)**2) == \
r"\left(\frac{d}{d x} x\right)^{2}"
assert latex(diff(f(x), x)**2) == \
r"\left(\frac{d}{d x} f{\left(x \right)}\right)^{2}"
assert latex(diff(f(x), (x, n))) == \
r"\frac{d^{n}}{d x^{n}} f{\left(x \right)}"
x1 = Symbol('x1')
x2 = Symbol('x2')
assert latex(diff(f(x1, x2), x1)) == r'\frac{\partial}{\partial x_{1}} f{\left(x_{1},x_{2} \right)}'
n1 = Symbol('n1')
assert latex(diff(f(x), (x, n1))) == r'\frac{d^{n_{1}}}{d x^{n_{1}}} f{\left(x \right)}'
n2 = Symbol('n2')
assert latex(diff(f(x), (x, Max(n1, n2)))) == \
r'\frac{d^{\max\left(n_{1}, n_{2}\right)}}{d x^{\max\left(n_{1}, n_{2}\right)}} f{\left(x \right)}'
def test_latex_subs():
assert latex(Subs(x*y, (x, y), (1, 2))) == r'\left. x y \right|_{\substack{ x=1\\ y=2 }}'
def test_latex_integrals():
assert latex(Integral(log(x), x)) == r"\int \log{\left(x \right)}\, dx"
assert latex(Integral(x**2, (x, 0, 1))) == \
r"\int\limits_{0}^{1} x^{2}\, dx"
assert latex(Integral(x**2, (x, 10, 20))) == \
r"\int\limits_{10}^{20} x^{2}\, dx"
assert latex(Integral(y*x**2, (x, 0, 1), y)) == \
r"\int\int\limits_{0}^{1} x^{2} y\, dx\, dy"
assert latex(Integral(y*x**2, (x, 0, 1), y), mode='equation*') == \
r"\begin{equation*}\int\int\limits_{0}^{1} x^{2} y\, dx\, dy\end{equation*}"
assert latex(Integral(y*x**2, (x, 0, 1), y), mode='equation*', itex=True) \
== r"$$\int\int_{0}^{1} x^{2} y\, dx\, dy$$"
assert latex(Integral(x, (x, 0))) == r"\int\limits^{0} x\, dx"
assert latex(Integral(x*y, x, y)) == r"\iint x y\, dx\, dy"
assert latex(Integral(x*y*z, x, y, z)) == r"\iiint x y z\, dx\, dy\, dz"
assert latex(Integral(x*y*z*t, x, y, z, t)) == \
r"\iiiint t x y z\, dx\, dy\, dz\, dt"
assert latex(Integral(x, x, x, x, x, x, x)) == \
r"\int\int\int\int\int\int x\, dx\, dx\, dx\, dx\, dx\, dx"
assert latex(Integral(x, x, y, (z, 0, 1))) == \
r"\int\limits_{0}^{1}\int\int x\, dx\, dy\, dz"
# for negative nested Integral
assert latex(Integral(-Integral(y**2,x),x)) == \
r'\int \left(- \int y^{2}\, dx\right)\, dx'
assert latex(Integral(-Integral(-Integral(y,x),x),x)) == \
r'\int \left(- \int \left(- \int y\, dx\right)\, dx\right)\, dx'
# fix issue #10806
assert latex(Integral(z, z)**2) == r"\left(\int z\, dz\right)^{2}"
assert latex(Integral(x + z, z)) == r"\int \left(x + z\right)\, dz"
assert latex(Integral(x+z/2, z)) == \
r"\int \left(x + \frac{z}{2}\right)\, dz"
assert latex(Integral(x**y, z)) == r"\int x^{y}\, dz"
def test_latex_sets():
for s in (frozenset, set):
assert latex(s([x*y, x**2])) == r"\left\{x^{2}, x y\right\}"
assert latex(s(range(1, 6))) == r"\left\{1, 2, 3, 4, 5\right\}"
assert latex(s(range(1, 13))) == \
r"\left\{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12\right\}"
s = FiniteSet
assert latex(s(*[x*y, x**2])) == r"\left\{x^{2}, x y\right\}"
assert latex(s(*range(1, 6))) == r"\left\{1, 2, 3, 4, 5\right\}"
assert latex(s(*range(1, 13))) == \
r"\left\{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12\right\}"
def test_latex_SetExpr():
iv = Interval(1, 3)
se = SetExpr(iv)
assert latex(se) == r"SetExpr\left(\left[1, 3\right]\right)"
def test_latex_Range():
assert latex(Range(1, 51)) == r'\left\{1, 2, \ldots, 50\right\}'
assert latex(Range(1, 4)) == r'\left\{1, 2, 3\right\}'
assert latex(Range(0, 3, 1)) == r'\left\{0, 1, 2\right\}'
assert latex(Range(0, 30, 1)) == r'\left\{0, 1, \ldots, 29\right\}'
assert latex(Range(30, 1, -1)) == r'\left\{30, 29, \ldots, 2\right\}'
assert latex(Range(0, oo, 2)) == r'\left\{0, 2, \ldots\right\}'
assert latex(Range(oo, -2, -2)) == r'\left\{\ldots, 2, 0\right\}'
assert latex(Range(-2, -oo, -1)) == r'\left\{-2, -3, \ldots\right\}'
assert latex(Range(-oo, oo)) == r'\left\{\ldots, -1, 0, 1, \ldots\right\}'
assert latex(Range(oo, -oo, -1)) == r'\left\{\ldots, 1, 0, -1, \ldots\right\}'
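    # For arguments without integer assumptions the elements cannot be listed,
    # so the printer appears to fall back to the functional \text{Range}(...)
    # form tested below.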
a, b, c = symbols('a:c')
assert latex(Range(a, b, c)) == r'\text{Range}\left(a, b, c\right)'
assert latex(Range(a, 10, 1)) == r'\text{Range}\left(a, 10\right)'
assert latex(Range(0, b, 1)) == r'\text{Range}\left(b\right)'
assert latex(Range(0, 10, c)) == r'\text{Range}\left(0, 10, c\right)'
i = Symbol('i', integer=True)
n = Symbol('n', negative=True, integer=True)
p = Symbol('p', positive=True, integer=True)
assert latex(Range(i, i + 3)) == r'\left\{i, i + 1, i + 2\right\}'
assert latex(Range(-oo, n, 2)) == r'\left\{\ldots, n - 4, n - 2\right\}'
assert latex(Range(p, oo)) == r'\left\{p, p + 1, \ldots\right\}'
# The following will work if __iter__ is improved
# assert latex(Range(-3, p + 7)) == r'\left\{-3, -2, \ldots, p + 6\right\}'
# Must have integer assumptions
assert latex(Range(a, a + 3)) == r'\text{Range}\left(a, a + 3\right)'
def test_latex_sequences():
s1 = SeqFormula(a**2, (0, oo))
s2 = SeqPer((1, 2))
latex_str = r'\left[0, 1, 4, 9, \ldots\right]'
assert latex(s1) == latex_str
latex_str = r'\left[1, 2, 1, 2, \ldots\right]'
assert latex(s2) == latex_str
s3 = SeqFormula(a**2, (0, 2))
s4 = SeqPer((1, 2), (0, 2))
latex_str = r'\left[0, 1, 4\right]'
assert latex(s3) == latex_str
latex_str = r'\left[1, 2, 1\right]'
assert latex(s4) == latex_str
s5 = SeqFormula(a**2, (-oo, 0))
s6 = SeqPer((1, 2), (-oo, 0))
latex_str = r'\left[\ldots, 9, 4, 1, 0\right]'
assert latex(s5) == latex_str
latex_str = r'\left[\ldots, 2, 1, 2, 1\right]'
assert latex(s6) == latex_str
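    # SeqAdd and SeqMul appear to combine their operands term by term, as the
    # expanded forms below show.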
latex_str = r'\left[1, 3, 5, 11, \ldots\right]'
assert latex(SeqAdd(s1, s2)) == latex_str
latex_str = r'\left[1, 3, 5\right]'
assert latex(SeqAdd(s3, s4)) == latex_str
latex_str = r'\left[\ldots, 11, 5, 3, 1\right]'
assert latex(SeqAdd(s5, s6)) == latex_str
latex_str = r'\left[0, 2, 4, 18, \ldots\right]'
assert latex(SeqMul(s1, s2)) == latex_str
latex_str = r'\left[0, 2, 4\right]'
assert latex(SeqMul(s3, s4)) == latex_str
latex_str = r'\left[\ldots, 18, 4, 2, 0\right]'
assert latex(SeqMul(s5, s6)) == latex_str
# Sequences with symbolic limits, issue 12629
s7 = SeqFormula(a**2, (a, 0, x))
latex_str = r'\left\{a^{2}\right\}_{a=0}^{x}'
assert latex(s7) == latex_str
b = Symbol('b')
s8 = SeqFormula(b*a**2, (a, 0, 2))
latex_str = r'\left[0, b, 4 b\right]'
assert latex(s8) == latex_str
def test_latex_FourierSeries():
latex_str = \
r'2 \sin{\left(x \right)} - \sin{\left(2 x \right)} + \frac{2 \sin{\left(3 x \right)}}{3} + \ldots'
assert latex(fourier_series(x, (x, -pi, pi))) == latex_str
def test_latex_FormalPowerSeries():
latex_str = r'\sum_{k=1}^{\infty} - \frac{\left(-1\right)^{- k} x^{k}}{k}'
assert latex(fps(log(1 + x))) == latex_str
def test_latex_intervals():
a = Symbol('a', real=True)
assert latex(Interval(0, 0)) == r"\left\{0\right\}"
assert latex(Interval(0, a)) == r"\left[0, a\right]"
assert latex(Interval(0, a, False, False)) == r"\left[0, a\right]"
assert latex(Interval(0, a, True, False)) == r"\left(0, a\right]"
assert latex(Interval(0, a, False, True)) == r"\left[0, a\right)"
assert latex(Interval(0, a, True, True)) == r"\left(0, a\right)"
def test_latex_AccumuBounds():
a = Symbol('a', real=True)
assert latex(AccumBounds(0, 1)) == r"\left\langle 0, 1\right\rangle"
assert latex(AccumBounds(0, a)) == r"\left\langle 0, a\right\rangle"
assert latex(AccumBounds(a + 1, a + 2)) == \
r"\left\langle a + 1, a + 2\right\rangle"
def test_latex_emptyset():
assert latex(S.EmptySet) == r"\emptyset"
def test_latex_universalset():
assert latex(S.UniversalSet) == r"\mathbb{U}"
def test_latex_commutator():
A = Operator('A')
B = Operator('B')
comm = Commutator(B, A)
assert latex(comm.doit()) == r"- (A B - B A)"
def test_latex_union():
assert latex(Union(Interval(0, 1), Interval(2, 3))) == \
r"\left[0, 1\right] \cup \left[2, 3\right]"
assert latex(Union(Interval(1, 1), Interval(2, 2), Interval(3, 4))) == \
r"\left\{1, 2\right\} \cup \left[3, 4\right]"
def test_latex_intersection():
assert latex(Intersection(Interval(0, 1), Interval(x, y))) == \
r"\left[0, 1\right] \cap \left[x, y\right]"
def test_latex_symmetric_difference():
assert latex(SymmetricDifference(Interval(2, 5), Interval(4, 7),
evaluate=False)) == \
r'\left[2, 5\right] \triangle \left[4, 7\right]'
def test_latex_Complement():
assert latex(Complement(S.Reals, S.Naturals)) == \
r"\mathbb{R} \setminus \mathbb{N}"
def test_latex_productset():
line = Interval(0, 1)
bigline = Interval(0, 10)
fset = FiniteSet(1, 2, 3)
assert latex(line**2) == r"%s^{2}" % latex(line)
assert latex(line**10) == r"%s^{10}" % latex(line)
assert latex((line * bigline * fset).flatten()) == r"%s \times %s \times %s" % (
latex(line), latex(bigline), latex(fset))
def test_set_operators_parenthesis():
a, b, c, d = symbols('a:d')
A = FiniteSet(a)
B = FiniteSet(b)
C = FiniteSet(c)
D = FiniteSet(d)
U1 = Union(A, B, evaluate=False)
U2 = Union(C, D, evaluate=False)
I1 = Intersection(A, B, evaluate=False)
I2 = Intersection(C, D, evaluate=False)
C1 = Complement(A, B, evaluate=False)
C2 = Complement(C, D, evaluate=False)
D1 = SymmetricDifference(A, B, evaluate=False)
D2 = SymmetricDifference(C, D, evaluate=False)
# XXX ProductSet does not support evaluate keyword
P1 = ProductSet(A, B)
P2 = ProductSet(C, D)
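    # The assertions below suggest that compound operands (unions,
    # intersections, complements, symmetric differences and products) are
    # wrapped in \left(...\right), while bare finite sets are not.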
assert latex(Intersection(A, U2, evaluate=False)) == \
r'\left\{a\right\} \cap ' \
r'\left(\left\{c\right\} \cup \left\{d\right\}\right)'
assert latex(Intersection(U1, U2, evaluate=False)) == \
r'\left(\left\{a\right\} \cup \left\{b\right\}\right) ' \
r'\cap \left(\left\{c\right\} \cup \left\{d\right\}\right)'
assert latex(Intersection(C1, C2, evaluate=False)) == \
r'\left(\left\{a\right\} \setminus ' \
r'\left\{b\right\}\right) \cap \left(\left\{c\right\} ' \
r'\setminus \left\{d\right\}\right)'
assert latex(Intersection(D1, D2, evaluate=False)) == \
r'\left(\left\{a\right\} \triangle ' \
r'\left\{b\right\}\right) \cap \left(\left\{c\right\} ' \
r'\triangle \left\{d\right\}\right)'
assert latex(Intersection(P1, P2, evaluate=False)) == \
r'\left(\left\{a\right\} \times \left\{b\right\}\right) ' \
r'\cap \left(\left\{c\right\} \times ' \
r'\left\{d\right\}\right)'
assert latex(Union(A, I2, evaluate=False)) == \
r'\left\{a\right\} \cup ' \
r'\left(\left\{c\right\} \cap \left\{d\right\}\right)'
assert latex(Union(I1, I2, evaluate=False)) == \
r'\left(\left\{a\right\} \cap \left\{b\right\}\right) ' \
r'\cup \left(\left\{c\right\} \cap \left\{d\right\}\right)'
assert latex(Union(C1, C2, evaluate=False)) == \
r'\left(\left\{a\right\} \setminus ' \
r'\left\{b\right\}\right) \cup \left(\left\{c\right\} ' \
r'\setminus \left\{d\right\}\right)'
assert latex(Union(D1, D2, evaluate=False)) == \
r'\left(\left\{a\right\} \triangle ' \
r'\left\{b\right\}\right) \cup \left(\left\{c\right\} ' \
r'\triangle \left\{d\right\}\right)'
assert latex(Union(P1, P2, evaluate=False)) == \
r'\left(\left\{a\right\} \times \left\{b\right\}\right) ' \
r'\cup \left(\left\{c\right\} \times ' \
r'\left\{d\right\}\right)'
assert latex(Complement(A, C2, evaluate=False)) == \
r'\left\{a\right\} \setminus \left(\left\{c\right\} ' \
r'\setminus \left\{d\right\}\right)'
assert latex(Complement(U1, U2, evaluate=False)) == \
r'\left(\left\{a\right\} \cup \left\{b\right\}\right) ' \
r'\setminus \left(\left\{c\right\} \cup ' \
r'\left\{d\right\}\right)'
assert latex(Complement(I1, I2, evaluate=False)) == \
r'\left(\left\{a\right\} \cap \left\{b\right\}\right) ' \
r'\setminus \left(\left\{c\right\} \cap ' \
r'\left\{d\right\}\right)'
assert latex(Complement(D1, D2, evaluate=False)) == \
r'\left(\left\{a\right\} \triangle ' \
r'\left\{b\right\}\right) \setminus ' \
r'\left(\left\{c\right\} \triangle \left\{d\right\}\right)'
assert latex(Complement(P1, P2, evaluate=False)) == \
r'\left(\left\{a\right\} \times \left\{b\right\}\right) '\
r'\setminus \left(\left\{c\right\} \times '\
r'\left\{d\right\}\right)'
assert latex(SymmetricDifference(A, D2, evaluate=False)) == \
r'\left\{a\right\} \triangle \left(\left\{c\right\} ' \
r'\triangle \left\{d\right\}\right)'
assert latex(SymmetricDifference(U1, U2, evaluate=False)) == \
r'\left(\left\{a\right\} \cup \left\{b\right\}\right) ' \
r'\triangle \left(\left\{c\right\} \cup ' \
r'\left\{d\right\}\right)'
assert latex(SymmetricDifference(I1, I2, evaluate=False)) == \
r'\left(\left\{a\right\} \cap \left\{b\right\}\right) ' \
r'\triangle \left(\left\{c\right\} \cap ' \
r'\left\{d\right\}\right)'
assert latex(SymmetricDifference(C1, C2, evaluate=False)) == \
r'\left(\left\{a\right\} \setminus ' \
r'\left\{b\right\}\right) \triangle ' \
r'\left(\left\{c\right\} \setminus \left\{d\right\}\right)'
assert latex(SymmetricDifference(P1, P2, evaluate=False)) == \
r'\left(\left\{a\right\} \times \left\{b\right\}\right) ' \
r'\triangle \left(\left\{c\right\} \times ' \
r'\left\{d\right\}\right)'
    # XXX This can be incorrect since the Cartesian product is not associative
assert latex(ProductSet(A, P2).flatten()) == \
r'\left\{a\right\} \times \left\{c\right\} \times ' \
r'\left\{d\right\}'
assert latex(ProductSet(U1, U2)) == \
r'\left(\left\{a\right\} \cup \left\{b\right\}\right) ' \
r'\times \left(\left\{c\right\} \cup ' \
r'\left\{d\right\}\right)'
assert latex(ProductSet(I1, I2)) == \
r'\left(\left\{a\right\} \cap \left\{b\right\}\right) ' \
r'\times \left(\left\{c\right\} \cap ' \
r'\left\{d\right\}\right)'
assert latex(ProductSet(C1, C2)) == \
r'\left(\left\{a\right\} \setminus ' \
r'\left\{b\right\}\right) \times \left(\left\{c\right\} ' \
r'\setminus \left\{d\right\}\right)'
assert latex(ProductSet(D1, D2)) == \
r'\left(\left\{a\right\} \triangle ' \
r'\left\{b\right\}\right) \times \left(\left\{c\right\} ' \
r'\triangle \left\{d\right\}\right)'
def test_latex_Complexes():
assert latex(S.Complexes) == r"\mathbb{C}"
def test_latex_Naturals():
assert latex(S.Naturals) == r"\mathbb{N}"
def test_latex_Naturals0():
assert latex(S.Naturals0) == r"\mathbb{N}_0"
def test_latex_Integers():
assert latex(S.Integers) == r"\mathbb{Z}"
def test_latex_ImageSet():
x = Symbol('x')
assert latex(ImageSet(Lambda(x, x**2), S.Naturals)) == \
r"\left\{x^{2}\; \middle|\; x \in \mathbb{N}\right\}"
y = Symbol('y')
imgset = ImageSet(Lambda((x, y), x + y), {1, 2, 3}, {3, 4})
assert latex(imgset) == \
r"\left\{x + y\; \middle|\; x \in \left\{1, 2, 3\right\} , y \in \left\{3, 4\right\}\right\}"
imgset = ImageSet(Lambda(((x, y),), x + y), ProductSet({1, 2, 3}, {3, 4}))
assert latex(imgset) == \
r"\left\{x + y\; \middle|\; \left( x, \ y\right) \in \left\{1, 2, 3\right\} \times \left\{3, 4\right\}\right\}"
def test_latex_ConditionSet():
x = Symbol('x')
assert latex(ConditionSet(x, Eq(x**2, 1), S.Reals)) == \
r"\left\{x\; \middle|\; x \in \mathbb{R} \wedge x^{2} = 1 \right\}"
assert latex(ConditionSet(x, Eq(x**2, 1), S.UniversalSet)) == \
r"\left\{x\; \middle|\; x^{2} = 1 \right\}"
def test_latex_ComplexRegion():
assert latex(ComplexRegion(Interval(3, 5)*Interval(4, 6))) == \
r"\left\{x + y i\; \middle|\; x, y \in \left[3, 5\right] \times \left[4, 6\right] \right\}"
assert latex(ComplexRegion(Interval(0, 1)*Interval(0, 2*pi), polar=True)) == \
r"\left\{r \left(i \sin{\left(\theta \right)} + \cos{\left(\theta "\
r"\right)}\right)\; \middle|\; r, \theta \in \left[0, 1\right] \times \left[0, 2 \pi\right) \right\}"
def test_latex_Contains():
x = Symbol('x')
assert latex(Contains(x, S.Naturals)) == r"x \in \mathbb{N}"
def test_latex_sum():
assert latex(Sum(x*y**2, (x, -2, 2), (y, -5, 5))) == \
r"\sum_{\substack{-2 \leq x \leq 2\\-5 \leq y \leq 5}} x y^{2}"
assert latex(Sum(x**2, (x, -2, 2))) == \
r"\sum_{x=-2}^{2} x^{2}"
assert latex(Sum(x**2 + y, (x, -2, 2))) == \
r"\sum_{x=-2}^{2} \left(x^{2} + y\right)"
assert latex(Sum(x**2 + y, (x, -2, 2))**2) == \
r"\left(\sum_{x=-2}^{2} \left(x^{2} + y\right)\right)^{2}"
def test_latex_product():
assert latex(Product(x*y**2, (x, -2, 2), (y, -5, 5))) == \
r"\prod_{\substack{-2 \leq x \leq 2\\-5 \leq y \leq 5}} x y^{2}"
assert latex(Product(x**2, (x, -2, 2))) == \
r"\prod_{x=-2}^{2} x^{2}"
assert latex(Product(x**2 + y, (x, -2, 2))) == \
r"\prod_{x=-2}^{2} \left(x^{2} + y\right)"
assert latex(Product(x, (x, -2, 2))**2) == \
r"\left(\prod_{x=-2}^{2} x\right)^{2}"
def test_latex_limits():
assert latex(Limit(x, x, oo)) == r"\lim_{x \to \infty} x"
# issue 8175
f = Function('f')
assert latex(Limit(f(x), x, 0)) == r"\lim_{x \to 0^+} f{\left(x \right)}"
assert latex(Limit(f(x), x, 0, "-")) == \
r"\lim_{x \to 0^-} f{\left(x \right)}"
# issue #10806
assert latex(Limit(f(x), x, 0)**2) == \
r"\left(\lim_{x \to 0^+} f{\left(x \right)}\right)^{2}"
# bi-directional limit
assert latex(Limit(f(x), x, 0, dir='+-')) == \
r"\lim_{x \to 0} f{\left(x \right)}"
def test_latex_log():
assert latex(log(x)) == r"\log{\left(x \right)}"
assert latex(ln(x)) == r"\log{\left(x \right)}"
assert latex(log(x), ln_notation=True) == r"\ln{\left(x \right)}"
assert latex(log(x)+log(y)) == \
r"\log{\left(x \right)} + \log{\left(y \right)}"
assert latex(log(x)+log(y), ln_notation=True) == \
r"\ln{\left(x \right)} + \ln{\left(y \right)}"
assert latex(pow(log(x), x)) == r"\log{\left(x \right)}^{x}"
assert latex(pow(log(x), x), ln_notation=True) == \
r"\ln{\left(x \right)}^{x}"
def test_issue_3568():
beta = Symbol(r'\beta')
y = beta + x
assert latex(y) in [r'\beta + x', r'x + \beta']
beta = Symbol(r'beta')
y = beta + x
assert latex(y) in [r'\beta + x', r'x + \beta']
def test_latex():
assert latex((2*tau)**Rational(7, 2)) == r"8 \sqrt{2} \tau^{\frac{7}{2}}"
assert latex((2*mu)**Rational(7, 2), mode='equation*') == \
r"\begin{equation*}8 \sqrt{2} \mu^{\frac{7}{2}}\end{equation*}"
assert latex((2*mu)**Rational(7, 2), mode='equation', itex=True) == \
r"$$8 \sqrt{2} \mu^{\frac{7}{2}}$$"
assert latex([2/x, y]) == r"\left[ \frac{2}{x}, \ y\right]"
def test_latex_dict():
d = {Rational(1): 1, x**2: 2, x: 3, x**3: 4}
assert latex(d) == \
r'\left\{ 1 : 1, \ x : 3, \ x^{2} : 2, \ x^{3} : 4\right\}'
D = Dict(d)
assert latex(D) == \
r'\left\{ 1 : 1, \ x : 3, \ x^{2} : 2, \ x^{3} : 4\right\}'
def test_latex_list():
ll = [Symbol('omega1'), Symbol('a'), Symbol('alpha')]
assert latex(ll) == r'\left[ \omega_{1}, \ a, \ \alpha\right]'
def test_latex_rational():
# tests issue 3973
assert latex(-Rational(1, 2)) == r"- \frac{1}{2}"
assert latex(Rational(-1, 2)) == r"- \frac{1}{2}"
assert latex(Rational(1, -2)) == r"- \frac{1}{2}"
assert latex(-Rational(-1, 2)) == r"\frac{1}{2}"
assert latex(-Rational(1, 2)*x) == r"- \frac{x}{2}"
assert latex(-Rational(1, 2)*x + Rational(-2, 3)*y) == \
r"- \frac{x}{2} - \frac{2 y}{3}"
def test_latex_inverse():
# tests issue 4129
assert latex(1/x) == r"\frac{1}{x}"
assert latex(1/(x + y)) == r"\frac{1}{x + y}"
def test_latex_DiracDelta():
assert latex(DiracDelta(x)) == r"\delta\left(x\right)"
assert latex(DiracDelta(x)**2) == r"\left(\delta\left(x\right)\right)^{2}"
assert latex(DiracDelta(x, 0)) == r"\delta\left(x\right)"
assert latex(DiracDelta(x, 5)) == \
r"\delta^{\left( 5 \right)}\left( x \right)"
assert latex(DiracDelta(x, 5)**2) == \
r"\left(\delta^{\left( 5 \right)}\left( x \right)\right)^{2}"
def test_latex_Heaviside():
assert latex(Heaviside(x)) == r"\theta\left(x\right)"
assert latex(Heaviside(x)**2) == r"\left(\theta\left(x\right)\right)^{2}"
def test_latex_KroneckerDelta():
assert latex(KroneckerDelta(x, y)) == r"\delta_{x y}"
assert latex(KroneckerDelta(x, y + 1)) == r"\delta_{x, y + 1}"
# issue 6578
assert latex(KroneckerDelta(x + 1, y)) == r"\delta_{y, x + 1}"
assert latex(Pow(KroneckerDelta(x, y), 2, evaluate=False)) == \
r"\left(\delta_{x y}\right)^{2}"
def test_latex_LeviCivita():
assert latex(LeviCivita(x, y, z)) == r"\varepsilon_{x y z}"
assert latex(LeviCivita(x, y, z)**2) == \
r"\left(\varepsilon_{x y z}\right)^{2}"
assert latex(LeviCivita(x, y, z + 1)) == r"\varepsilon_{x, y, z + 1}"
assert latex(LeviCivita(x, y + 1, z)) == r"\varepsilon_{x, y + 1, z}"
assert latex(LeviCivita(x + 1, y, z)) == r"\varepsilon_{x + 1, y, z}"
def test_mode():
expr = x + y
assert latex(expr) == r'x + y'
assert latex(expr, mode='plain') == r'x + y'
assert latex(expr, mode='inline') == r'$x + y$'
assert latex(
expr, mode='equation*') == r'\begin{equation*}x + y\end{equation*}'
assert latex(
expr, mode='equation') == r'\begin{equation}x + y\end{equation}'
raises(ValueError, lambda: latex(expr, mode='foo'))
def test_latex_mathieu():
assert latex(mathieuc(x, y, z)) == r"C\left(x, y, z\right)"
assert latex(mathieus(x, y, z)) == r"S\left(x, y, z\right)"
assert latex(mathieuc(x, y, z)**2) == r"C\left(x, y, z\right)^{2}"
assert latex(mathieus(x, y, z)**2) == r"S\left(x, y, z\right)^{2}"
assert latex(mathieucprime(x, y, z)) == r"C^{\prime}\left(x, y, z\right)"
assert latex(mathieusprime(x, y, z)) == r"S^{\prime}\left(x, y, z\right)"
assert latex(mathieucprime(x, y, z)**2) == r"C^{\prime}\left(x, y, z\right)^{2}"
assert latex(mathieusprime(x, y, z)**2) == r"S^{\prime}\left(x, y, z\right)^{2}"
def test_latex_Piecewise():
p = Piecewise((x, x < 1), (x**2, True))
assert latex(p) == r"\begin{cases} x & \text{for}\: x < 1 \\x^{2} &" \
r" \text{otherwise} \end{cases}"
assert latex(p, itex=True) == \
r"\begin{cases} x & \text{for}\: x \lt 1 \\x^{2} &" \
r" \text{otherwise} \end{cases}"
p = Piecewise((x, x < 0), (0, x >= 0))
assert latex(p) == r'\begin{cases} x & \text{for}\: x < 0 \\0 &' \
r' \text{otherwise} \end{cases}'
A, B = symbols("A B", commutative=False)
p = Piecewise((A**2, Eq(A, B)), (A*B, True))
s = r"\begin{cases} A^{2} & \text{for}\: A = B \\A B & \text{otherwise} \end{cases}"
assert latex(p) == s
assert latex(A*p) == r"A \left(%s\right)" % s
assert latex(p*A) == r"\left(%s\right) A" % s
assert latex(Piecewise((x, x < 1), (x**2, x < 2))) == \
r'\begin{cases} x & ' \
r'\text{for}\: x < 1 \\x^{2} & \text{for}\: x < 2 \end{cases}'
def test_latex_Matrix():
M = Matrix([[1 + x, y], [y, x - 1]])
assert latex(M) == \
r'\left[\begin{matrix}x + 1 & y\\y & x - 1\end{matrix}\right]'
assert latex(M, mode='inline') == \
r'$\left[\begin{smallmatrix}x + 1 & y\\' \
r'y & x - 1\end{smallmatrix}\right]$'
assert latex(M, mat_str='array') == \
r'\left[\begin{array}{cc}x + 1 & y\\y & x - 1\end{array}\right]'
assert latex(M, mat_str='bmatrix') == \
r'\left[\begin{bmatrix}x + 1 & y\\y & x - 1\end{bmatrix}\right]'
assert latex(M, mat_delim=None, mat_str='bmatrix') == \
r'\begin{bmatrix}x + 1 & y\\y & x - 1\end{bmatrix}'
M2 = Matrix(1, 11, range(11))
assert latex(M2) == \
r'\left[\begin{array}{ccccccccccc}' \
r'0 & 1 & 2 & 3 & 4 & 5 & 6 & 7 & 8 & 9 & 10\end{array}\right]'
def test_latex_matrix_with_functions():
t = symbols('t')
theta1 = symbols('theta1', cls=Function)
M = Matrix([[sin(theta1(t)), cos(theta1(t))],
[cos(theta1(t).diff(t)), sin(theta1(t).diff(t))]])
expected = (r'\left[\begin{matrix}\sin{\left('
r'\theta_{1}{\left(t \right)} \right)} & '
r'\cos{\left(\theta_{1}{\left(t \right)} \right)'
r'}\\\cos{\left(\frac{d}{d t} \theta_{1}{\left(t '
r'\right)} \right)} & \sin{\left(\frac{d}{d t} '
r'\theta_{1}{\left(t \right)} \right'
r')}\end{matrix}\right]')
assert latex(M) == expected
def test_latex_NDimArray():
x, y, z, w = symbols("x y z w")
for ArrayType in (ImmutableDenseNDimArray, ImmutableSparseNDimArray,
MutableDenseNDimArray, MutableSparseNDimArray):
# Basic: scalar array
M = ArrayType(x)
assert latex(M) == r"x"
M = ArrayType([[1 / x, y], [z, w]])
M1 = ArrayType([1 / x, y, z])
M2 = tensorproduct(M1, M)
M3 = tensorproduct(M, M)
assert latex(M) == \
r'\left[\begin{matrix}\frac{1}{x} & y\\z & w\end{matrix}\right]'
assert latex(M1) == \
r"\left[\begin{matrix}\frac{1}{x} & y & z\end{matrix}\right]"
assert latex(M2) == \
r"\left[\begin{matrix}" \
r"\left[\begin{matrix}\frac{1}{x^{2}} & \frac{y}{x}\\\frac{z}{x} & \frac{w}{x}\end{matrix}\right] & " \
r"\left[\begin{matrix}\frac{y}{x} & y^{2}\\y z & w y\end{matrix}\right] & " \
r"\left[\begin{matrix}\frac{z}{x} & y z\\z^{2} & w z\end{matrix}\right]" \
r"\end{matrix}\right]"
assert latex(M3) == \
r"""\left[\begin{matrix}"""\
r"""\left[\begin{matrix}\frac{1}{x^{2}} & \frac{y}{x}\\\frac{z}{x} & \frac{w}{x}\end{matrix}\right] & """\
r"""\left[\begin{matrix}\frac{y}{x} & y^{2}\\y z & w y\end{matrix}\right]\\"""\
r"""\left[\begin{matrix}\frac{z}{x} & y z\\z^{2} & w z\end{matrix}\right] & """\
r"""\left[\begin{matrix}\frac{w}{x} & w y\\w z & w^{2}\end{matrix}\right]"""\
r"""\end{matrix}\right]"""
Mrow = ArrayType([[x, y, 1/z]])
Mcolumn = ArrayType([[x], [y], [1/z]])
Mcol2 = ArrayType([Mcolumn.tolist()])
assert latex(Mrow) == \
r"\left[\left[\begin{matrix}x & y & \frac{1}{z}\end{matrix}\right]\right]"
assert latex(Mcolumn) == \
r"\left[\begin{matrix}x\\y\\\frac{1}{z}\end{matrix}\right]"
assert latex(Mcol2) == \
r'\left[\begin{matrix}\left[\begin{matrix}x\\y\\\frac{1}{z}\end{matrix}\right]\end{matrix}\right]'
def test_latex_mul_symbol():
assert latex(4*4**x, mul_symbol='times') == r"4 \times 4^{x}"
assert latex(4*4**x, mul_symbol='dot') == r"4 \cdot 4^{x}"
assert latex(4*4**x, mul_symbol='ldot') == r"4 \,.\, 4^{x}"
assert latex(4*x, mul_symbol='times') == r"4 \times x"
assert latex(4*x, mul_symbol='dot') == r"4 \cdot x"
assert latex(4*x, mul_symbol='ldot') == r"4 \,.\, x"
def test_latex_issue_4381():
y = 4*4**log(2)
assert latex(y) == r'4 \cdot 4^{\log{\left(2 \right)}}'
assert latex(1/y) == r'\frac{1}{4 \cdot 4^{\log{\left(2 \right)}}}'
def test_latex_issue_4576():
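    # The cases below exercise what appears to be the convention that each '_'
    # introduces a subscript token, '__' (or '^') a superscript token, and
    # repeated tokens of the same kind are joined with spaces in one group.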
assert latex(Symbol("beta_13_2")) == r"\beta_{13 2}"
assert latex(Symbol("beta_132_20")) == r"\beta_{132 20}"
assert latex(Symbol("beta_13")) == r"\beta_{13}"
assert latex(Symbol("x_a_b")) == r"x_{a b}"
assert latex(Symbol("x_1_2_3")) == r"x_{1 2 3}"
assert latex(Symbol("x_a_b1")) == r"x_{a b1}"
assert latex(Symbol("x_a_1")) == r"x_{a 1}"
assert latex(Symbol("x_1_a")) == r"x_{1 a}"
assert latex(Symbol("x_1^aa")) == r"x^{aa}_{1}"
assert latex(Symbol("x_1__aa")) == r"x^{aa}_{1}"
assert latex(Symbol("x_11^a")) == r"x^{a}_{11}"
assert latex(Symbol("x_11__a")) == r"x^{a}_{11}"
assert latex(Symbol("x_a_a_a_a")) == r"x_{a a a a}"
assert latex(Symbol("x_a_a^a^a")) == r"x^{a a}_{a a}"
assert latex(Symbol("x_a_a__a__a")) == r"x^{a a}_{a a}"
assert latex(Symbol("alpha_11")) == r"\alpha_{11}"
assert latex(Symbol("alpha_11_11")) == r"\alpha_{11 11}"
assert latex(Symbol("alpha_alpha")) == r"\alpha_{\alpha}"
assert latex(Symbol("alpha^aleph")) == r"\alpha^{\aleph}"
assert latex(Symbol("alpha__aleph")) == r"\alpha^{\aleph}"
def test_latex_pow_fraction():
x = Symbol('x')
# Testing exp
assert r'e^{-x}' in latex(exp(-x)/2).replace(' ', '') # Remove Whitespace
# Testing e^{-x} in case future changes alter behavior of muls or fracs
# In particular current output is \frac{1}{2}e^{- x} but perhaps this will
# change to \frac{e^{-x}}{2}
# Testing general, non-exp, power
assert r'3^{-x}' in latex(3**-x/2).replace(' ', '')
def test_noncommutative():
A, B, C = symbols('A,B,C', commutative=False)
assert latex(A*B*C**-1) == r"A B C^{-1}"
assert latex(C**-1*A*B) == r"C^{-1} A B"
assert latex(A*C**-1*B) == r"A C^{-1} B"
def test_latex_order():
expr = x**3 + x**2*y + y**4 + 3*x*y**3
assert latex(expr, order='lex') == r"x^{3} + x^{2} y + 3 x y^{3} + y^{4}"
assert latex(
expr, order='rev-lex') == r"y^{4} + 3 x y^{3} + x^{2} y + x^{3}"
assert latex(expr, order='none') == r"x^{3} + y^{4} + y x^{2} + 3 x y^{3}"
def test_latex_Lambda():
assert latex(Lambda(x, x + 1)) == r"\left( x \mapsto x + 1 \right)"
assert latex(Lambda((x, y), x + 1)) == r"\left( \left( x, \ y\right) \mapsto x + 1 \right)"
assert latex(Lambda(x, x)) == r"\left( x \mapsto x \right)"
def test_latex_PolyElement():
Ruv, u, v = ring("u,v", ZZ)
Rxyz, x, y, z = ring("x,y,z", Ruv)
assert latex(x - x) == r"0"
assert latex(x - 1) == r"x - 1"
assert latex(x + 1) == r"x + 1"
assert latex((u**2 + 3*u*v + 1)*x**2*y + u + 1) == \
r"\left({u}^{2} + 3 u v + 1\right) {x}^{2} y + u + 1"
assert latex((u**2 + 3*u*v + 1)*x**2*y + (u + 1)*x) == \
r"\left({u}^{2} + 3 u v + 1\right) {x}^{2} y + \left(u + 1\right) x"
assert latex((u**2 + 3*u*v + 1)*x**2*y + (u + 1)*x + 1) == \
r"\left({u}^{2} + 3 u v + 1\right) {x}^{2} y + \left(u + 1\right) x + 1"
assert latex((-u**2 + 3*u*v - 1)*x**2*y - (u + 1)*x - 1) == \
r"-\left({u}^{2} - 3 u v + 1\right) {x}^{2} y - \left(u + 1\right) x - 1"
assert latex(-(v**2 + v + 1)*x + 3*u*v + 1) == \
r"-\left({v}^{2} + v + 1\right) x + 3 u v + 1"
assert latex(-(v**2 + v + 1)*x - 3*u*v + 1) == \
r"-\left({v}^{2} + v + 1\right) x - 3 u v + 1"
def test_latex_FracElement():
Fuv, u, v = field("u,v", ZZ)
Fxyzt, x, y, z, t = field("x,y,z,t", Fuv)
assert latex(x - x) == r"0"
assert latex(x - 1) == r"x - 1"
assert latex(x + 1) == r"x + 1"
assert latex(x/3) == r"\frac{x}{3}"
assert latex(x/z) == r"\frac{x}{z}"
assert latex(x*y/z) == r"\frac{x y}{z}"
assert latex(x/(z*t)) == r"\frac{x}{z t}"
assert latex(x*y/(z*t)) == r"\frac{x y}{z t}"
assert latex((x - 1)/y) == r"\frac{x - 1}{y}"
assert latex((x + 1)/y) == r"\frac{x + 1}{y}"
assert latex((-x - 1)/y) == r"\frac{-x - 1}{y}"
assert latex((x + 1)/(y*z)) == r"\frac{x + 1}{y z}"
assert latex(-y/(x + 1)) == r"\frac{-y}{x + 1}"
assert latex(y*z/(x + 1)) == r"\frac{y z}{x + 1}"
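    # For FracElement a leading minus sign appears to stay inside the numerator
    # (e.g. \frac{-x - 1}{y}) rather than being factored out of the fraction.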
assert latex(((u + 1)*x*y + 1)/((v - 1)*z - 1)) == \
r"\frac{\left(u + 1\right) x y + 1}{\left(v - 1\right) z - 1}"
assert latex(((u + 1)*x*y + 1)/((v - 1)*z - t*u*v - 1)) == \
r"\frac{\left(u + 1\right) x y + 1}{\left(v - 1\right) z - u v t - 1}"
def test_latex_Poly():
assert latex(Poly(x**2 + 2 * x, x)) == \
r"\operatorname{Poly}{\left( x^{2} + 2 x, x, domain=\mathbb{Z} \right)}"
assert latex(Poly(x/y, x)) == \
r"\operatorname{Poly}{\left( \frac{1}{y} x, x, domain=\mathbb{Z}\left(y\right) \right)}"
assert latex(Poly(2.0*x + y)) == \
r"\operatorname{Poly}{\left( 2.0 x + 1.0 y, x, y, domain=\mathbb{R} \right)}"
def test_latex_Poly_order():
assert latex(Poly([a, 1, b, 2, c, 3], x)) == \
r'\operatorname{Poly}{\left( a x^{5} + x^{4} + b x^{3} + 2 x^{2} + c'\
r' x + 3, x, domain=\mathbb{Z}\left[a, b, c\right] \right)}'
assert latex(Poly([a, 1, b+c, 2, 3], x)) == \
r'\operatorname{Poly}{\left( a x^{4} + x^{3} + \left(b + c\right) '\
r'x^{2} + 2 x + 3, x, domain=\mathbb{Z}\left[a, b, c\right] \right)}'
assert latex(Poly(a*x**3 + x**2*y - x*y - c*y**3 - b*x*y**2 + y - a*x + b,
(x, y))) == \
r'\operatorname{Poly}{\left( a x^{3} + x^{2}y - b xy^{2} - xy - '\
r'a x - c y^{3} + y + b, x, y, domain=\mathbb{Z}\left[a, b, c\right] \right)}'
def test_latex_ComplexRootOf():
assert latex(rootof(x**5 + x + 3, 0)) == \
r"\operatorname{CRootOf} {\left(x^{5} + x + 3, 0\right)}"
def test_latex_RootSum():
assert latex(RootSum(x**5 + x + 3, sin)) == \
r"\operatorname{RootSum} {\left(x^{5} + x + 3, \left( x \mapsto \sin{\left(x \right)} \right)\right)}"
def test_settings():
raises(TypeError, lambda: latex(x*y, method="garbage"))
def test_latex_numbers():
assert latex(catalan(n)) == r"C_{n}"
assert latex(catalan(n)**2) == r"C_{n}^{2}"
assert latex(bernoulli(n)) == r"B_{n}"
assert latex(bernoulli(n, x)) == r"B_{n}\left(x\right)"
assert latex(bernoulli(n)**2) == r"B_{n}^{2}"
assert latex(bernoulli(n, x)**2) == r"B_{n}^{2}\left(x\right)"
assert latex(bell(n)) == r"B_{n}"
assert latex(bell(n, x)) == r"B_{n}\left(x\right)"
assert latex(bell(n, m, (x, y))) == r"B_{n, m}\left(x, y\right)"
assert latex(bell(n)**2) == r"B_{n}^{2}"
assert latex(bell(n, x)**2) == r"B_{n}^{2}\left(x\right)"
assert latex(bell(n, m, (x, y))**2) == r"B_{n, m}^{2}\left(x, y\right)"
assert latex(fibonacci(n)) == r"F_{n}"
assert latex(fibonacci(n, x)) == r"F_{n}\left(x\right)"
assert latex(fibonacci(n)**2) == r"F_{n}^{2}"
assert latex(fibonacci(n, x)**2) == r"F_{n}^{2}\left(x\right)"
assert latex(lucas(n)) == r"L_{n}"
assert latex(lucas(n)**2) == r"L_{n}^{2}"
assert latex(tribonacci(n)) == r"T_{n}"
assert latex(tribonacci(n, x)) == r"T_{n}\left(x\right)"
assert latex(tribonacci(n)**2) == r"T_{n}^{2}"
assert latex(tribonacci(n, x)**2) == r"T_{n}^{2}\left(x\right)"
def test_latex_euler():
assert latex(euler(n)) == r"E_{n}"
assert latex(euler(n, x)) == r"E_{n}\left(x\right)"
assert latex(euler(n, x)**2) == r"E_{n}^{2}\left(x\right)"
def test_lamda():
assert latex(Symbol('lamda')) == r"\lambda"
assert latex(Symbol('Lamda')) == r"\Lambda"
def test_custom_symbol_names():
x = Symbol('x')
y = Symbol('y')
assert latex(x) == r"x"
assert latex(x, symbol_names={x: "x_i"}) == r"x_i"
assert latex(x + y, symbol_names={x: "x_i"}) == r"x_i + y"
assert latex(x**2, symbol_names={x: "x_i"}) == r"x_i^{2}"
assert latex(x + y, symbol_names={x: "x_i", y: "y_j"}) == r"x_i + y_j"
def test_matAdd():
C = MatrixSymbol('C', 5, 5)
B = MatrixSymbol('B', 5, 5)
l = LatexPrinter()
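    # Term ordering in a MatAdd is not fixed, which is presumably why either
    # printed ordering is accepted below.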
assert l._print(C - 2*B) in [r'- 2 B + C', r'C -2 B']
assert l._print(C + 2*B) in [r'2 B + C', r'C + 2 B']
assert l._print(B - 2*C) in [r'B - 2 C', r'- 2 C + B']
assert l._print(B + 2*C) in [r'B + 2 C', r'2 C + B']
def test_matMul():
A = MatrixSymbol('A', 5, 5)
B = MatrixSymbol('B', 5, 5)
x = Symbol('x')
lp = LatexPrinter()
assert lp._print_MatMul(2*A) == r'2 A'
assert lp._print_MatMul(2*x*A) == r'2 x A'
assert lp._print_MatMul(-2*A) == r'- 2 A'
assert lp._print_MatMul(1.5*A) == r'1.5 A'
assert lp._print_MatMul(sqrt(2)*A) == r'\sqrt{2} A'
assert lp._print_MatMul(-sqrt(2)*A) == r'- \sqrt{2} A'
assert lp._print_MatMul(2*sqrt(2)*x*A) == r'2 \sqrt{2} x A'
assert lp._print_MatMul(-2*A*(A + 2*B)) in [r'- 2 A \left(A + 2 B\right)',
r'- 2 A \left(2 B + A\right)']
def test_latex_MatrixSlice():
n = Symbol('n', integer=True)
    x, y, z, w, t = symbols('x y z w t')
X = MatrixSymbol('X', n, n)
Y = MatrixSymbol('Y', 10, 10)
Z = MatrixSymbol('Z', 10, 10)
assert latex(MatrixSlice(X, (None, None, None), (None, None, None))) == r'X\left[:, :\right]'
assert latex(X[x:x + 1, y:y + 1]) == r'X\left[x:x + 1, y:y + 1\right]'
assert latex(X[x:x + 1:2, y:y + 1:2]) == r'X\left[x:x + 1:2, y:y + 1:2\right]'
assert latex(X[:x, y:]) == r'X\left[:x, y:\right]'
assert latex(X[:x, y:]) == r'X\left[:x, y:\right]'
assert latex(X[x:, :y]) == r'X\left[x:, :y\right]'
assert latex(X[x:y, z:w]) == r'X\left[x:y, z:w\right]'
assert latex(X[x:y:t, w:t:x]) == r'X\left[x:y:t, w:t:x\right]'
assert latex(X[x::y, t::w]) == r'X\left[x::y, t::w\right]'
assert latex(X[:x:y, :t:w]) == r'X\left[:x:y, :t:w\right]'
assert latex(X[::x, ::y]) == r'X\left[::x, ::y\right]'
assert latex(MatrixSlice(X, (0, None, None), (0, None, None))) == r'X\left[:, :\right]'
assert latex(MatrixSlice(X, (None, n, None), (None, n, None))) == r'X\left[:, :\right]'
assert latex(MatrixSlice(X, (0, n, None), (0, n, None))) == r'X\left[:, :\right]'
assert latex(MatrixSlice(X, (0, n, 2), (0, n, 2))) == r'X\left[::2, ::2\right]'
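    # Slices covering a full dimension appear to be normalized to ':', keeping
    # only a non-trivial step (as in '::2' above).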
assert latex(X[1:2:3, 4:5:6]) == r'X\left[1:2:3, 4:5:6\right]'
assert latex(X[1:3:5, 4:6:8]) == r'X\left[1:3:5, 4:6:8\right]'
assert latex(X[1:10:2]) == r'X\left[1:10:2, :\right]'
assert latex(Y[:5, 1:9:2]) == r'Y\left[:5, 1:9:2\right]'
assert latex(Y[:5, 1:10:2]) == r'Y\left[:5, 1::2\right]'
assert latex(Y[5, :5:2]) == r'Y\left[5:6, :5:2\right]'
assert latex(X[0:1, 0:1]) == r'X\left[:1, :1\right]'
assert latex(X[0:1:2, 0:1:2]) == r'X\left[:1:2, :1:2\right]'
assert latex((Y + Z)[2:, 2:]) == r'\left(Y + Z\right)\left[2:, 2:\right]'
def test_latex_RandomDomain():
from sympy.stats import Normal, Die, Exponential, pspace, where
from sympy.stats.rv import RandomDomain
X = Normal('x1', 0, 1)
assert latex(where(X > 0)) == r"\text{Domain: }0 < x_{1} \wedge x_{1} < \infty"
D = Die('d1', 6)
assert latex(where(D > 4)) == r"\text{Domain: }d_{1} = 5 \vee d_{1} = 6"
A = Exponential('a', 1)
B = Exponential('b', 1)
assert latex(
pspace(Tuple(A, B)).domain) == \
r"\text{Domain: }0 \leq a \wedge 0 \leq b \wedge a < \infty \wedge b < \infty"
assert latex(RandomDomain(FiniteSet(x), FiniteSet(1, 2))) == \
r'\text{Domain: }\left\{x\right\}\text{ in }\left\{1, 2\right\}'
def test_PrettyPoly():
from sympy.polys.domains import QQ
F = QQ.frac_field(x, y)
R = QQ[x, y]
assert latex(F.convert(x/(x + y))) == latex(x/(x + y))
assert latex(R.convert(x + y)) == latex(x + y)
def test_integral_transforms():
x = Symbol("x")
k = Symbol("k")
f = Function("f")
a = Symbol("a")
b = Symbol("b")
assert latex(MellinTransform(f(x), x, k)) == \
r"\mathcal{M}_{x}\left[f{\left(x \right)}\right]\left(k\right)"
assert latex(InverseMellinTransform(f(k), k, x, a, b)) == \
r"\mathcal{M}^{-1}_{k}\left[f{\left(k \right)}\right]\left(x\right)"
assert latex(LaplaceTransform(f(x), x, k)) == \
r"\mathcal{L}_{x}\left[f{\left(x \right)}\right]\left(k\right)"
assert latex(InverseLaplaceTransform(f(k), k, x, (a, b))) == \
r"\mathcal{L}^{-1}_{k}\left[f{\left(k \right)}\right]\left(x\right)"
assert latex(FourierTransform(f(x), x, k)) == \
r"\mathcal{F}_{x}\left[f{\left(x \right)}\right]\left(k\right)"
assert latex(InverseFourierTransform(f(k), k, x)) == \
r"\mathcal{F}^{-1}_{k}\left[f{\left(k \right)}\right]\left(x\right)"
assert latex(CosineTransform(f(x), x, k)) == \
r"\mathcal{COS}_{x}\left[f{\left(x \right)}\right]\left(k\right)"
assert latex(InverseCosineTransform(f(k), k, x)) == \
r"\mathcal{COS}^{-1}_{k}\left[f{\left(k \right)}\right]\left(x\right)"
assert latex(SineTransform(f(x), x, k)) == \
r"\mathcal{SIN}_{x}\left[f{\left(x \right)}\right]\left(k\right)"
assert latex(InverseSineTransform(f(k), k, x)) == \
r"\mathcal{SIN}^{-1}_{k}\left[f{\left(k \right)}\right]\left(x\right)"
def test_PolynomialRingBase():
from sympy.polys.domains import QQ
assert latex(QQ.old_poly_ring(x, y)) == r"\mathbb{Q}\left[x, y\right]"
assert latex(QQ.old_poly_ring(x, y, order="ilex")) == \
r"S_<^{-1}\mathbb{Q}\left[x, y\right]"
def test_categories():
from sympy.categories import (Object, IdentityMorphism,
NamedMorphism, Category, Diagram,
DiagramGrid)
A1 = Object("A1")
A2 = Object("A2")
A3 = Object("A3")
f1 = NamedMorphism(A1, A2, "f1")
f2 = NamedMorphism(A2, A3, "f2")
id_A1 = IdentityMorphism(A1)
K1 = Category("K1")
assert latex(A1) == r"A_{1}"
assert latex(f1) == r"f_{1}:A_{1}\rightarrow A_{2}"
assert latex(id_A1) == r"id:A_{1}\rightarrow A_{1}"
assert latex(f2*f1) == r"f_{2}\circ f_{1}:A_{1}\rightarrow A_{3}"
assert latex(K1) == r"\mathbf{K_{1}}"
d = Diagram()
assert latex(d) == r"\emptyset"
d = Diagram({f1: "unique", f2: S.EmptySet})
assert latex(d) == r"\left\{ f_{2}\circ f_{1}:A_{1}" \
r"\rightarrow A_{3} : \emptyset, \ id:A_{1}\rightarrow " \
r"A_{1} : \emptyset, \ id:A_{2}\rightarrow A_{2} : " \
r"\emptyset, \ id:A_{3}\rightarrow A_{3} : \emptyset, " \
r"\ f_{1}:A_{1}\rightarrow A_{2} : \left\{unique\right\}, " \
r"\ f_{2}:A_{2}\rightarrow A_{3} : \emptyset\right\}"
d = Diagram({f1: "unique", f2: S.EmptySet}, {f2 * f1: "unique"})
assert latex(d) == r"\left\{ f_{2}\circ f_{1}:A_{1}" \
r"\rightarrow A_{3} : \emptyset, \ id:A_{1}\rightarrow " \
r"A_{1} : \emptyset, \ id:A_{2}\rightarrow A_{2} : " \
r"\emptyset, \ id:A_{3}\rightarrow A_{3} : \emptyset, " \
r"\ f_{1}:A_{1}\rightarrow A_{2} : \left\{unique\right\}," \
r" \ f_{2}:A_{2}\rightarrow A_{3} : \emptyset\right\}" \
r"\Longrightarrow \left\{ f_{2}\circ f_{1}:A_{1}" \
r"\rightarrow A_{3} : \left\{unique\right\}\right\}"
# A linear diagram.
A = Object("A")
B = Object("B")
C = Object("C")
f = NamedMorphism(A, B, "f")
g = NamedMorphism(B, C, "g")
d = Diagram([f, g])
grid = DiagramGrid(d)
assert latex(grid) == r"\begin{array}{cc}" + "\n" \
r"A & B \\" + "\n" \
r" & C " + "\n" \
r"\end{array}" + "\n"
def test_Modules():
from sympy.polys.domains import QQ
from sympy.polys.agca import homomorphism
R = QQ.old_poly_ring(x, y)
F = R.free_module(2)
M = F.submodule([x, y], [1, x**2])
assert latex(F) == r"{\mathbb{Q}\left[x, y\right]}^{2}"
assert latex(M) == \
r"\left\langle {\left[ {x},{y} \right]},{\left[ {1},{x^{2}} \right]} \right\rangle"
I = R.ideal(x**2, y)
assert latex(I) == r"\left\langle {x^{2}},{y} \right\rangle"
Q = F / M
assert latex(Q) == \
r"\frac{{\mathbb{Q}\left[x, y\right]}^{2}}{\left\langle {\left[ {x},"\
r"{y} \right]},{\left[ {1},{x^{2}} \right]} \right\rangle}"
assert latex(Q.submodule([1, x**3/2], [2, y])) == \
r"\left\langle {{\left[ {1},{\frac{x^{3}}{2}} \right]} + {\left"\
r"\langle {\left[ {x},{y} \right]},{\left[ {1},{x^{2}} \right]} "\
r"\right\rangle}},{{\left[ {2},{y} \right]} + {\left\langle {\left[ "\
r"{x},{y} \right]},{\left[ {1},{x^{2}} \right]} \right\rangle}} \right\rangle"
h = homomorphism(QQ.old_poly_ring(x).free_module(2),
QQ.old_poly_ring(x).free_module(2), [0, 0])
assert latex(h) == \
r"{\left[\begin{matrix}0 & 0\\0 & 0\end{matrix}\right]} : "\
r"{{\mathbb{Q}\left[x\right]}^{2}} \to {{\mathbb{Q}\left[x\right]}^{2}}"
def test_QuotientRing():
from sympy.polys.domains import QQ
R = QQ.old_poly_ring(x)/[x**2 + 1]
assert latex(R) == \
r"\frac{\mathbb{Q}\left[x\right]}{\left\langle {x^{2} + 1} \right\rangle}"
assert latex(R.one) == r"{1} + {\left\langle {x^{2} + 1} \right\rangle}"
def test_Tr():
    # TODO: Handle indices
A, B = symbols('A B', commutative=False)
t = Tr(A*B)
assert latex(t) == r'\operatorname{tr}\left(A B\right)'
def test_Adjoint():
from sympy.matrices import Adjoint, Inverse, Transpose
X = MatrixSymbol('X', 2, 2)
Y = MatrixSymbol('Y', 2, 2)
assert latex(Adjoint(X)) == r'X^{\dagger}'
assert latex(Adjoint(X + Y)) == r'\left(X + Y\right)^{\dagger}'
assert latex(Adjoint(X) + Adjoint(Y)) == r'X^{\dagger} + Y^{\dagger}'
assert latex(Adjoint(X*Y)) == r'\left(X Y\right)^{\dagger}'
assert latex(Adjoint(Y)*Adjoint(X)) == r'Y^{\dagger} X^{\dagger}'
assert latex(Adjoint(X**2)) == r'\left(X^{2}\right)^{\dagger}'
assert latex(Adjoint(X)**2) == r'\left(X^{\dagger}\right)^{2}'
assert latex(Adjoint(Inverse(X))) == r'\left(X^{-1}\right)^{\dagger}'
assert latex(Inverse(Adjoint(X))) == r'\left(X^{\dagger}\right)^{-1}'
assert latex(Adjoint(Transpose(X))) == r'\left(X^{T}\right)^{\dagger}'
assert latex(Transpose(Adjoint(X))) == r'\left(X^{\dagger}\right)^{T}'
assert latex(Transpose(Adjoint(X) + Y)) == r'\left(X^{\dagger} + Y\right)^{T}'
def test_Transpose():
from sympy.matrices import Transpose, MatPow, HadamardPower
X = MatrixSymbol('X', 2, 2)
Y = MatrixSymbol('Y', 2, 2)
assert latex(Transpose(X)) == r'X^{T}'
assert latex(Transpose(X + Y)) == r'\left(X + Y\right)^{T}'
assert latex(Transpose(HadamardPower(X, 2))) == r'\left(X^{\circ {2}}\right)^{T}'
assert latex(HadamardPower(Transpose(X), 2)) == r'\left(X^{T}\right)^{\circ {2}}'
assert latex(Transpose(MatPow(X, 2))) == r'\left(X^{2}\right)^{T}'
assert latex(MatPow(Transpose(X), 2)) == r'\left(X^{T}\right)^{2}'
def test_Hadamard():
from sympy.matrices import HadamardProduct, HadamardPower
from sympy.matrices.expressions import MatAdd, MatMul, MatPow
X = MatrixSymbol('X', 2, 2)
Y = MatrixSymbol('Y', 2, 2)
assert latex(HadamardProduct(X, Y*Y)) == r'X \circ Y^{2}'
assert latex(HadamardProduct(X, Y)*Y) == r'\left(X \circ Y\right) Y'
assert latex(HadamardPower(X, 2)) == r'X^{\circ {2}}'
assert latex(HadamardPower(X, -1)) == r'X^{\circ \left({-1}\right)}'
assert latex(HadamardPower(MatAdd(X, Y), 2)) == \
r'\left(X + Y\right)^{\circ {2}}'
assert latex(HadamardPower(MatMul(X, Y), 2)) == \
r'\left(X Y\right)^{\circ {2}}'
assert latex(HadamardPower(MatPow(X, -1), -1)) == \
r'\left(X^{-1}\right)^{\circ \left({-1}\right)}'
assert latex(MatPow(HadamardPower(X, -1), -1)) == \
r'\left(X^{\circ \left({-1}\right)}\right)^{-1}'
assert latex(HadamardPower(X, n+1)) == \
r'X^{\circ \left({n + 1}\right)}'
def test_ElementwiseApplyFunction():
X = MatrixSymbol('X', 2, 2)
expr = (X.T*X).applyfunc(sin)
assert latex(expr) == r"{\left( d \mapsto \sin{\left(d \right)} \right)}_{\circ}\left({X^{T} X}\right)"
expr = X.applyfunc(Lambda(x, 1/x))
assert latex(expr) == r'{\left( x \mapsto \frac{1}{x} \right)}_{\circ}\left({X}\right)'
def test_ZeroMatrix():
from sympy import ZeroMatrix
assert latex(ZeroMatrix(1, 1), mat_symbol_style='plain') == r"0"
assert latex(ZeroMatrix(1, 1), mat_symbol_style='bold') == r"\mathbf{0}"
def test_OneMatrix():
from sympy import OneMatrix
assert latex(OneMatrix(3, 4), mat_symbol_style='plain') == r"1"
assert latex(OneMatrix(3, 4), mat_symbol_style='bold') == r"\mathbf{1}"
def test_Identity():
from sympy import Identity
assert latex(Identity(1), mat_symbol_style='plain') == r"\mathbb{I}"
assert latex(Identity(1), mat_symbol_style='bold') == r"\mathbf{I}"
def test_boolean_args_order():
syms = symbols('a:f')
expr = And(*syms)
assert latex(expr) == r'a \wedge b \wedge c \wedge d \wedge e \wedge f'
expr = Or(*syms)
assert latex(expr) == r'a \vee b \vee c \vee d \vee e \vee f'
expr = Equivalent(*syms)
assert latex(expr) == \
r'a \Leftrightarrow b \Leftrightarrow c \Leftrightarrow d \Leftrightarrow e \Leftrightarrow f'
expr = Xor(*syms)
assert latex(expr) == \
r'a \veebar b \veebar c \veebar d \veebar e \veebar f'
def test_imaginary():
i = sqrt(-1)
assert latex(i) == r'i'
def test_builtins_without_args():
assert latex(sin) == r'\sin'
assert latex(cos) == r'\cos'
assert latex(tan) == r'\tan'
assert latex(log) == r'\log'
assert latex(Ei) == r'\operatorname{Ei}'
assert latex(zeta) == r'\zeta'
def test_latex_greek_functions():
# bug because capital greeks that have roman equivalents should not use
# \Alpha, \Beta, \Eta, etc.
s = Function('Alpha')
assert latex(s) == r'A'
assert latex(s(x)) == r'A{\left(x \right)}'
s = Function('Beta')
assert latex(s) == r'B'
s = Function('Eta')
assert latex(s) == r'H'
assert latex(s(x)) == r'H{\left(x \right)}'
# bug because sympy.core.numbers.Pi is special
p = Function('Pi')
# assert latex(p(x)) == r'\Pi{\left(x \right)}'
assert latex(p) == r'\Pi'
# bug because not all greeks are included
c = Function('chi')
assert latex(c(x)) == r'\chi{\left(x \right)}'
assert latex(c) == r'\chi'
def test_translate():
s = 'Alpha'
assert translate(s) == r'A'
s = 'Beta'
assert translate(s) == r'B'
s = 'Eta'
assert translate(s) == r'H'
s = 'omicron'
assert translate(s) == r'o'
s = 'Pi'
assert translate(s) == r'\Pi'
s = 'pi'
assert translate(s) == r'\pi'
s = 'LamdaHatDOT'
assert translate(s) == r'\dot{\hat{\Lambda}}'
def test_other_symbols():
from sympy.printing.latex import other_symbols
for s in other_symbols:
        assert latex(symbols(s)) == "\\" + s
def test_modifiers():
# Test each modifier individually in the simplest case
# (with funny capitalizations)
assert latex(symbols("xMathring")) == r"\mathring{x}"
assert latex(symbols("xCheck")) == r"\check{x}"
assert latex(symbols("xBreve")) == r"\breve{x}"
assert latex(symbols("xAcute")) == r"\acute{x}"
assert latex(symbols("xGrave")) == r"\grave{x}"
assert latex(symbols("xTilde")) == r"\tilde{x}"
assert latex(symbols("xPrime")) == r"{x}'"
assert latex(symbols("xddDDot")) == r"\ddddot{x}"
assert latex(symbols("xDdDot")) == r"\dddot{x}"
assert latex(symbols("xDDot")) == r"\ddot{x}"
assert latex(symbols("xBold")) == r"\boldsymbol{x}"
assert latex(symbols("xnOrM")) == r"\left\|{x}\right\|"
assert latex(symbols("xAVG")) == r"\left\langle{x}\right\rangle"
assert latex(symbols("xHat")) == r"\hat{x}"
assert latex(symbols("xDot")) == r"\dot{x}"
assert latex(symbols("xBar")) == r"\bar{x}"
assert latex(symbols("xVec")) == r"\vec{x}"
assert latex(symbols("xAbs")) == r"\left|{x}\right|"
assert latex(symbols("xMag")) == r"\left|{x}\right|"
assert latex(symbols("xPrM")) == r"{x}'"
assert latex(symbols("xBM")) == r"\boldsymbol{x}"
# Test strings that are *only* the names of modifiers
assert latex(symbols("Mathring")) == r"Mathring"
assert latex(symbols("Check")) == r"Check"
assert latex(symbols("Breve")) == r"Breve"
assert latex(symbols("Acute")) == r"Acute"
assert latex(symbols("Grave")) == r"Grave"
assert latex(symbols("Tilde")) == r"Tilde"
assert latex(symbols("Prime")) == r"Prime"
assert latex(symbols("DDot")) == r"\dot{D}"
assert latex(symbols("Bold")) == r"Bold"
assert latex(symbols("NORm")) == r"NORm"
assert latex(symbols("AVG")) == r"AVG"
assert latex(symbols("Hat")) == r"Hat"
assert latex(symbols("Dot")) == r"Dot"
assert latex(symbols("Bar")) == r"Bar"
assert latex(symbols("Vec")) == r"Vec"
assert latex(symbols("Abs")) == r"Abs"
assert latex(symbols("Mag")) == r"Mag"
assert latex(symbols("PrM")) == r"PrM"
assert latex(symbols("BM")) == r"BM"
assert latex(symbols("hbar")) == r"\hbar"
# Check a few combinations
assert latex(symbols("xvecdot")) == r"\dot{\vec{x}}"
assert latex(symbols("xDotVec")) == r"\vec{\dot{x}}"
assert latex(symbols("xHATNorm")) == r"\left\|{\hat{x}}\right\|"
# Check a couple big, ugly combinations
assert latex(symbols('xMathringBm_yCheckPRM__zbreveAbs')) == \
r"\boldsymbol{\mathring{x}}^{\left|{\breve{z}}\right|}_{{\check{y}}'}"
assert latex(symbols('alphadothat_nVECDOT__tTildePrime')) == \
r"\hat{\dot{\alpha}}^{{\tilde{t}}'}_{\dot{\vec{n}}}"
def test_greek_symbols():
assert latex(Symbol('alpha')) == r'\alpha'
assert latex(Symbol('beta')) == r'\beta'
assert latex(Symbol('gamma')) == r'\gamma'
assert latex(Symbol('delta')) == r'\delta'
assert latex(Symbol('epsilon')) == r'\epsilon'
assert latex(Symbol('zeta')) == r'\zeta'
assert latex(Symbol('eta')) == r'\eta'
assert latex(Symbol('theta')) == r'\theta'
assert latex(Symbol('iota')) == r'\iota'
assert latex(Symbol('kappa')) == r'\kappa'
assert latex(Symbol('lambda')) == r'\lambda'
assert latex(Symbol('mu')) == r'\mu'
assert latex(Symbol('nu')) == r'\nu'
assert latex(Symbol('xi')) == r'\xi'
assert latex(Symbol('omicron')) == r'o'
assert latex(Symbol('pi')) == r'\pi'
assert latex(Symbol('rho')) == r'\rho'
assert latex(Symbol('sigma')) == r'\sigma'
assert latex(Symbol('tau')) == r'\tau'
assert latex(Symbol('upsilon')) == r'\upsilon'
assert latex(Symbol('phi')) == r'\phi'
assert latex(Symbol('chi')) == r'\chi'
assert latex(Symbol('psi')) == r'\psi'
assert latex(Symbol('omega')) == r'\omega'
assert latex(Symbol('Alpha')) == r'A'
assert latex(Symbol('Beta')) == r'B'
assert latex(Symbol('Gamma')) == r'\Gamma'
assert latex(Symbol('Delta')) == r'\Delta'
assert latex(Symbol('Epsilon')) == r'E'
assert latex(Symbol('Zeta')) == r'Z'
assert latex(Symbol('Eta')) == r'H'
assert latex(Symbol('Theta')) == r'\Theta'
assert latex(Symbol('Iota')) == r'I'
assert latex(Symbol('Kappa')) == r'K'
assert latex(Symbol('Lambda')) == r'\Lambda'
assert latex(Symbol('Mu')) == r'M'
assert latex(Symbol('Nu')) == r'N'
assert latex(Symbol('Xi')) == r'\Xi'
assert latex(Symbol('Omicron')) == r'O'
assert latex(Symbol('Pi')) == r'\Pi'
assert latex(Symbol('Rho')) == r'P'
assert latex(Symbol('Sigma')) == r'\Sigma'
assert latex(Symbol('Tau')) == r'T'
assert latex(Symbol('Upsilon')) == r'\Upsilon'
assert latex(Symbol('Phi')) == r'\Phi'
assert latex(Symbol('Chi')) == r'X'
assert latex(Symbol('Psi')) == r'\Psi'
assert latex(Symbol('Omega')) == r'\Omega'
assert latex(Symbol('varepsilon')) == r'\varepsilon'
assert latex(Symbol('varkappa')) == r'\varkappa'
assert latex(Symbol('varphi')) == r'\varphi'
assert latex(Symbol('varpi')) == r'\varpi'
assert latex(Symbol('varrho')) == r'\varrho'
assert latex(Symbol('varsigma')) == r'\varsigma'
assert latex(Symbol('vartheta')) == r'\vartheta'
def test_fancyset_symbols():
assert latex(S.Rationals) == r'\mathbb{Q}'
assert latex(S.Naturals) == r'\mathbb{N}'
assert latex(S.Naturals0) == r'\mathbb{N}_0'
assert latex(S.Integers) == r'\mathbb{Z}'
assert latex(S.Reals) == r'\mathbb{R}'
assert latex(S.Complexes) == r'\mathbb{C}'
@XFAIL
def test_builtin_without_args_mismatched_names():
assert latex(CosineTransform) == r'\mathcal{COS}'
def test_builtin_no_args():
assert latex(Chi) == r'\operatorname{Chi}'
assert latex(beta) == r'\operatorname{B}'
assert latex(gamma) == r'\Gamma'
assert latex(KroneckerDelta) == r'\delta'
assert latex(DiracDelta) == r'\delta'
assert latex(lowergamma) == r'\gamma'
def test_issue_6853():
p = Function('Pi')
assert latex(p(x)) == r"\Pi{\left(x \right)}"
def test_Mul():
e = Mul(-2, x + 1, evaluate=False)
assert latex(e) == r'- 2 \left(x + 1\right)'
e = Mul(2, x + 1, evaluate=False)
assert latex(e) == r'2 \left(x + 1\right)'
e = Mul(S.Half, x + 1, evaluate=False)
assert latex(e) == r'\frac{x + 1}{2}'
e = Mul(y, x + 1, evaluate=False)
assert latex(e) == r'y \left(x + 1\right)'
e = Mul(-y, x + 1, evaluate=False)
assert latex(e) == r'- y \left(x + 1\right)'
e = Mul(-2, x + 1)
assert latex(e) == r'- 2 x - 2'
e = Mul(2, x + 1)
assert latex(e) == r'2 x + 2'
def test_Pow():
e = Pow(2, 2, evaluate=False)
assert latex(e) == r'2^{2}'
assert latex(x**(Rational(-1, 3))) == r'\frac{1}{\sqrt[3]{x}}'
x2 = Symbol(r'x^2')
assert latex(x2**2) == r'\left(x^{2}\right)^{2}'
def test_issue_7180():
assert latex(Equivalent(x, y)) == r"x \Leftrightarrow y"
assert latex(Not(Equivalent(x, y))) == r"x \not\Leftrightarrow y"
def test_issue_8409():
assert latex(S.Half**n) == r"\left(\frac{1}{2}\right)^{n}"
def test_issue_8470():
from sympy.parsing.sympy_parser import parse_expr
e = parse_expr("-B*A", evaluate=False)
assert latex(e) == r"A \left(- B\right)"
def test_issue_15439():
x = MatrixSymbol('x', 2, 2)
y = MatrixSymbol('y', 2, 2)
assert latex((x * y).subs(y, -y)) == r"x \left(- y\right)"
assert latex((x * y).subs(y, -2*y)) == r"x \left(- 2 y\right)"
assert latex((x * y).subs(x, -x)) == r"- x y"
def test_issue_2934():
assert latex(Symbol(r'\frac{a_1}{b_1}')) == r'\frac{a_1}{b_1}'
def test_issue_10489():
latexSymbolWithBrace = r'C_{x_{0}}'
s = Symbol(latexSymbolWithBrace)
assert latex(s) == latexSymbolWithBrace
assert latex(cos(s)) == r'\cos{\left(C_{x_{0}} \right)}'
def test_issue_12886():
m__1, l__1 = symbols('m__1, l__1')
assert latex(m__1**2 + l__1**2) == \
r'\left(l^{1}\right)^{2} + \left(m^{1}\right)^{2}'
def test_issue_13559():
from sympy.parsing.sympy_parser import parse_expr
expr = parse_expr('5/1', evaluate=False)
assert latex(expr) == r"\frac{5}{1}"
def test_issue_13651():
expr = c + Mul(-1, a + b, evaluate=False)
assert latex(expr) == r"c - \left(a + b\right)"
def test_latex_UnevaluatedExpr():
x = symbols("x")
he = UnevaluatedExpr(1/x)
assert latex(he) == latex(1/x) == r"\frac{1}{x}"
assert latex(he**2) == r"\left(\frac{1}{x}\right)^{2}"
assert latex(he + 1) == r"1 + \frac{1}{x}"
assert latex(x*he) == r"x \frac{1}{x}"
def test_MatrixElement_printing():
# test cases for issue #11821
A = MatrixSymbol("A", 1, 3)
B = MatrixSymbol("B", 1, 3)
C = MatrixSymbol("C", 1, 3)
assert latex(A[0, 0]) == r"A_{0, 0}"
assert latex(3 * A[0, 0]) == r"3 A_{0, 0}"
F = C[0, 0].subs(C, A - B)
assert latex(F) == r"\left(A - B\right)_{0, 0}"
i, j, k = symbols("i j k")
M = MatrixSymbol("M", k, k)
N = MatrixSymbol("N", k, k)
assert latex((M*N)[i, j]) == \
r'\sum_{i_{1}=0}^{k - 1} M_{i, i_{1}} N_{i_{1}, j}'
def test_MatrixSymbol_printing():
# test cases for issue #14237
A = MatrixSymbol("A", 3, 3)
B = MatrixSymbol("B", 3, 3)
C = MatrixSymbol("C", 3, 3)
assert latex(-A) == r"- A"
assert latex(A - A*B - B) == r"A - A B - B"
assert latex(-A*B - A*B*C - B) == r"- A B - A B C - B"
def test_KroneckerProduct_printing():
A = MatrixSymbol('A', 3, 3)
B = MatrixSymbol('B', 2, 2)
assert latex(KroneckerProduct(A, B)) == r'A \otimes B'
def test_Series_printing():
tf1 = TransferFunction(x*y**2 - z, y**3 - t**3, y)
tf2 = TransferFunction(x - y, x + y, y)
tf3 = TransferFunction(t*x**2 - t**w*x + w, t - y, y)
assert latex(Series(tf1, tf2)) == \
r'\left(\frac{x y^{2} - z}{- t^{3} + y^{3}}\right) \left(\frac{x - y}{x + y}\right)'
assert latex(Series(tf1, tf2, tf3)) == \
r'\left(\frac{x y^{2} - z}{- t^{3} + y^{3}}\right) \left(\frac{x - y}{x + y}\right) \left(\frac{t x^{2} - t^{w} x + w}{t - y}\right)'
assert latex(Series(-tf2, tf1)) == \
r'\left(\frac{- x + y}{x + y}\right) \left(\frac{x y^{2} - z}{- t^{3} + y^{3}}\right)'
M_1 = Matrix([[5/s], [5/(2*s)]])
T_1 = TransferFunctionMatrix.from_Matrix(M_1, s)
M_2 = Matrix([[5, 6*s**3]])
T_2 = TransferFunctionMatrix.from_Matrix(M_2, s)
# Brackets
assert latex(T_1*(T_2 + T_2)) == \
r'\left[\begin{matrix}\frac{5}{s}\\\frac{5}{2 s}\end{matrix}\right]_\tau\cdot\left(\left[\begin{matrix}\frac{5}{1} &' \
r' \frac{6 s^{3}}{1}\end{matrix}\right]_\tau + \left[\begin{matrix}\frac{5}{1} & \frac{6 s^{3}}{1}\end{matrix}\right]_\tau\right)' \
== latex(MIMOSeries(MIMOParallel(T_2, T_2), T_1))
# No Brackets
M_3 = Matrix([[5, 6], [6, 5/s]])
T_3 = TransferFunctionMatrix.from_Matrix(M_3, s)
assert latex(T_1*T_2 + T_3) == r'\left[\begin{matrix}\frac{5}{s}\\\frac{5}{2 s}\end{matrix}\right]_\tau\cdot\left[\begin{matrix}' \
r'\frac{5}{1} & \frac{6 s^{3}}{1}\end{matrix}\right]_\tau + \left[\begin{matrix}\frac{5}{1} & \frac{6}{1}\\\frac{6}{1} & ' \
r'\frac{5}{s}\end{matrix}\right]_\tau' == latex(MIMOParallel(MIMOSeries(T_2, T_1), T_3))
def test_TransferFunction_printing():
tf1 = TransferFunction(x - 1, x + 1, x)
assert latex(tf1) == r"\frac{x - 1}{x + 1}"
tf2 = TransferFunction(x + 1, 2 - y, x)
assert latex(tf2) == r"\frac{x + 1}{2 - y}"
tf3 = TransferFunction(y, y**2 + 2*y + 3, y)
assert latex(tf3) == r"\frac{y}{y^{2} + 2 y + 3}"
def test_Parallel_printing():
tf1 = TransferFunction(x*y**2 - z, y**3 - t**3, y)
tf2 = TransferFunction(x - y, x + y, y)
assert latex(Parallel(tf1, tf2)) == \
r'\frac{x y^{2} - z}{- t^{3} + y^{3}} + \frac{x - y}{x + y}'
assert latex(Parallel(-tf2, tf1)) == \
r'\frac{- x + y}{x + y} + \frac{x y^{2} - z}{- t^{3} + y^{3}}'
M_1 = Matrix([[5, 6], [6, 5/s]])
T_1 = TransferFunctionMatrix.from_Matrix(M_1, s)
M_2 = Matrix([[5/s, 6], [6, 5/(s - 1)]])
T_2 = TransferFunctionMatrix.from_Matrix(M_2, s)
M_3 = Matrix([[6, 5/(s*(s - 1))], [5, 6]])
T_3 = TransferFunctionMatrix.from_Matrix(M_3, s)
assert latex(T_1 + T_2 + T_3) == r'\left[\begin{matrix}\frac{5}{1} & \frac{6}{1}\\\frac{6}{1} & \frac{5}{s}\end{matrix}\right]' \
r'_\tau + \left[\begin{matrix}\frac{5}{s} & \frac{6}{1}\\\frac{6}{1} & \frac{5}{s - 1}\end{matrix}\right]_\tau + \left[\begin{matrix}' \
r'\frac{6}{1} & \frac{5}{s \left(s - 1\right)}\\\frac{5}{1} & \frac{6}{1}\end{matrix}\right]_\tau' \
== latex(MIMOParallel(T_1, T_2, T_3)) == latex(MIMOParallel(T_1, MIMOParallel(T_2, T_3))) == latex(MIMOParallel(MIMOParallel(T_1, T_2), T_3))
def test_TransferFunctionMatrix_printing():
tf1 = TransferFunction(p, p + x, p)
tf2 = TransferFunction(-s + p, p + s, p)
tf3 = TransferFunction(p, y**2 + 2*y + 3, p)
assert latex(TransferFunctionMatrix([[tf1], [tf2]])) == \
r'\left[\begin{matrix}\frac{p}{p + x}\\\frac{p - s}{p + s}\end{matrix}\right]_\tau'
assert latex(TransferFunctionMatrix([[tf1, tf2], [tf3, -tf1]])) == \
r'\left[\begin{matrix}\frac{p}{p + x} & \frac{p - s}{p + s}\\\frac{p}{y^{2} + 2 y + 3} & \frac{\left(-1\right) p}{p + x}\end{matrix}\right]_\tau'
def test_Feedback_printing():
tf1 = TransferFunction(p, p + x, p)
tf2 = TransferFunction(-s + p, p + s, p)
# Negative Feedback (Default)
assert latex(Feedback(tf1, tf2)) == \
r'\frac{\frac{p}{p + x}}{\frac{1}{1} + \left(\frac{p}{p + x}\right) \left(\frac{p - s}{p + s}\right)}'
assert latex(Feedback(tf1*tf2, TransferFunction(1, 1, p))) == \
r'\frac{\left(\frac{p}{p + x}\right) \left(\frac{p - s}{p + s}\right)}{\frac{1}{1} + \left(\frac{p}{p + x}\right) \left(\frac{p - s}{p + s}\right)}'
# Positive Feedback
assert latex(Feedback(tf1, tf2, 1)) == \
r'\frac{\frac{p}{p + x}}{\frac{1}{1} - \left(\frac{p}{p + x}\right) \left(\frac{p - s}{p + s}\right)}'
assert latex(Feedback(tf1*tf2, sign=1)) == \
r'\frac{\left(\frac{p}{p + x}\right) \left(\frac{p - s}{p + s}\right)}{\frac{1}{1} - \left(\frac{p}{p + x}\right) \left(\frac{p - s}{p + s}\right)}'
def test_MIMOFeedback_printing():
tf1 = TransferFunction(1, s, s)
tf2 = TransferFunction(s, s**2 - 1, s)
tf3 = TransferFunction(s, s - 1, s)
tf4 = TransferFunction(s**2, s**2 - 1, s)
tfm_1 = TransferFunctionMatrix([[tf1, tf2], [tf3, tf4]])
tfm_2 = TransferFunctionMatrix([[tf4, tf3], [tf2, tf1]])
# Negative Feedback (Default)
assert latex(MIMOFeedback(tfm_1, tfm_2)) == \
r'\left(I_{\tau} + \left[\begin{matrix}\frac{1}{s} & \frac{s}{s^{2} - 1}\\\frac{s}{s - 1} & \frac{s^{2}}{s^{2} - 1}\end{matrix}\right]_\tau\cdot\left[' \
r'\begin{matrix}\frac{s^{2}}{s^{2} - 1} & \frac{s}{s - 1}\\\frac{s}{s^{2} - 1} & \frac{1}{s}\end{matrix}\right]_\tau\right)^{-1} \cdot \left[\begin{matrix}' \
r'\frac{1}{s} & \frac{s}{s^{2} - 1}\\\frac{s}{s - 1} & \frac{s^{2}}{s^{2} - 1}\end{matrix}\right]_\tau'
# Positive Feedback
assert latex(MIMOFeedback(tfm_1*tfm_2, tfm_1, 1)) == \
r'\left(I_{\tau} - \left[\begin{matrix}\frac{1}{s} & \frac{s}{s^{2} - 1}\\\frac{s}{s - 1} & \frac{s^{2}}{s^{2} - 1}\end{matrix}\right]_\tau\cdot\left' \
r'[\begin{matrix}\frac{s^{2}}{s^{2} - 1} & \frac{s}{s - 1}\\\frac{s}{s^{2} - 1} & \frac{1}{s}\end{matrix}\right]_\tau\cdot\left[\begin{matrix}\frac{1}{s} & \frac{s}{s^{2} - 1}' \
r'\\\frac{s}{s - 1} & \frac{s^{2}}{s^{2} - 1}\end{matrix}\right]_\tau\right)^{-1} \cdot \left[\begin{matrix}\frac{1}{s} & \frac{s}{s^{2} - 1}' \
r'\\\frac{s}{s - 1} & \frac{s^{2}}{s^{2} - 1}\end{matrix}\right]_\tau\cdot\left[\begin{matrix}\frac{s^{2}}{s^{2} - 1} & \frac{s}{s - 1}\\\frac{s}{s^{2} - 1}' \
r' & \frac{1}{s}\end{matrix}\right]_\tau'
def test_Quaternion_latex_printing():
q = Quaternion(x, y, z, t)
assert latex(q) == r"x + y i + z j + t k"
q = Quaternion(x, y, z, x*t)
assert latex(q) == r"x + y i + z j + t x k"
q = Quaternion(x, y, z, x + t)
assert latex(q) == r"x + y i + z j + \left(t + x\right) k"
def test_TensorProduct_printing():
from sympy.tensor.functions import TensorProduct
A = MatrixSymbol("A", 3, 3)
B = MatrixSymbol("B", 3, 3)
assert latex(TensorProduct(A, B)) == r"A \otimes B"
def test_WedgeProduct_printing():
from sympy.diffgeom.rn import R2
from sympy.diffgeom import WedgeProduct
wp = WedgeProduct(R2.dx, R2.dy)
assert latex(wp) == r"\operatorname{d}x \wedge \operatorname{d}y"
def test_issue_9216():
expr_1 = Pow(1, -1, evaluate=False)
assert latex(expr_1) == r"1^{-1}"
expr_2 = Pow(1, Pow(1, -1, evaluate=False), evaluate=False)
assert latex(expr_2) == r"1^{1^{-1}}"
expr_3 = Pow(3, -2, evaluate=False)
assert latex(expr_3) == r"\frac{1}{9}"
expr_4 = Pow(1, -2, evaluate=False)
assert latex(expr_4) == r"1^{-2}"
def test_latex_printer_tensor():
from sympy.tensor.tensor import TensorIndexType, tensor_indices, TensorHead, tensor_heads
L = TensorIndexType("L")
i, j, k, l = tensor_indices("i j k l", L)
i0 = tensor_indices("i_0", L)
A, B, C, D = tensor_heads("A B C D", [L])
H = TensorHead("H", [L, L])
K = TensorHead("K", [L, L, L, L])
assert latex(i) == r"{}^{i}"
assert latex(-i) == r"{}_{i}"
expr = A(i)
assert latex(expr) == r"A{}^{i}"
expr = A(i0)
assert latex(expr) == r"A{}^{i_{0}}"
expr = A(-i)
assert latex(expr) == r"A{}_{i}"
expr = -3*A(i)
assert latex(expr) == r"-3A{}^{i}"
expr = K(i, j, -k, -i0)
assert latex(expr) == r"K{}^{ij}{}_{ki_{0}}"
expr = K(i, -j, -k, i0)
assert latex(expr) == r"K{}^{i}{}_{jk}{}^{i_{0}}"
expr = K(i, -j, k, -i0)
assert latex(expr) == r"K{}^{i}{}_{j}{}^{k}{}_{i_{0}}"
expr = H(i, -j)
assert latex(expr) == r"H{}^{i}{}_{j}"
expr = H(i, j)
assert latex(expr) == r"H{}^{ij}"
expr = H(-i, -j)
assert latex(expr) == r"H{}_{ij}"
expr = (1+x)*A(i)
assert latex(expr) == r"\left(x + 1\right)A{}^{i}"
expr = H(i, -i)
assert latex(expr) == r"H{}^{L_{0}}{}_{L_{0}}"
expr = H(i, -j)*A(j)*B(k)
assert latex(expr) == r"H{}^{i}{}_{L_{0}}A{}^{L_{0}}B{}^{k}"
expr = A(i) + 3*B(i)
assert latex(expr) == r"3B{}^{i} + A{}^{i}"
# Test ``TensorElement``:
from sympy.tensor.tensor import TensorElement
expr = TensorElement(K(i, j, k, l), {i: 3, k: 2})
assert latex(expr) == r'K{}^{i=3,j,k=2,l}'
expr = TensorElement(K(i, j, k, l), {i: 3})
assert latex(expr) == r'K{}^{i=3,jkl}'
expr = TensorElement(K(i, -j, k, l), {i: 3, k: 2})
assert latex(expr) == r'K{}^{i=3}{}_{j}{}^{k=2,l}'
expr = TensorElement(K(i, -j, k, -l), {i: 3, k: 2})
assert latex(expr) == r'K{}^{i=3}{}_{j}{}^{k=2}{}_{l}'
expr = TensorElement(K(i, j, -k, -l), {i: 3, -k: 2})
assert latex(expr) == r'K{}^{i=3,j}{}_{k=2,l}'
expr = TensorElement(K(i, j, -k, -l), {i: 3})
assert latex(expr) == r'K{}^{i=3,j}{}_{kl}'
expr = PartialDerivative(A(i), A(i))
assert latex(expr) == r"\frac{\partial}{\partial {A{}^{L_{0}}}}{A{}^{L_{0}}}"
expr = PartialDerivative(A(-i), A(-j))
assert latex(expr) == r"\frac{\partial}{\partial {A{}_{j}}}{A{}_{i}}"
expr = PartialDerivative(K(i, j, -k, -l), A(m), A(-n))
assert latex(expr) == r"\frac{\partial^{2}}{\partial {A{}^{m}} \partial {A{}_{n}}}{K{}^{ij}{}_{kl}}"
expr = PartialDerivative(B(-i) + A(-i), A(-j), A(-n))
assert latex(expr) == r"\frac{\partial^{2}}{\partial {A{}_{j}} \partial {A{}_{n}}}{\left(A{}_{i} + B{}_{i}\right)}"
expr = PartialDerivative(3*A(-i), A(-j), A(-n))
assert latex(expr) == r"\frac{\partial^{2}}{\partial {A{}_{j}} \partial {A{}_{n}}}{\left(3A{}_{i}\right)}"
def test_multiline_latex():
a, b, c, d, e, f = symbols('a b c d e f')
expr = -a + 2*b -3*c +4*d -5*e
expected = r"\begin{eqnarray}" + "\n"\
r"f & = &- a \nonumber\\" + "\n"\
r"& & + 2 b \nonumber\\" + "\n"\
r"& & - 3 c \nonumber\\" + "\n"\
r"& & + 4 d \nonumber\\" + "\n"\
r"& & - 5 e " + "\n"\
r"\end{eqnarray}"
assert multiline_latex(f, expr, environment="eqnarray") == expected
expected2 = r'\begin{eqnarray}' + '\n'\
r'f & = &- a + 2 b \nonumber\\' + '\n'\
r'& & - 3 c + 4 d \nonumber\\' + '\n'\
r'& & - 5 e ' + '\n'\
r'\end{eqnarray}'
assert multiline_latex(f, expr, 2, environment="eqnarray") == expected2
expected3 = r'\begin{eqnarray}' + '\n'\
r'f & = &- a + 2 b - 3 c \nonumber\\'+ '\n'\
r'& & + 4 d - 5 e ' + '\n'\
r'\end{eqnarray}'
assert multiline_latex(f, expr, 3, environment="eqnarray") == expected3
expected3dots = r'\begin{eqnarray}' + '\n'\
r'f & = &- a + 2 b - 3 c \dots\nonumber\\'+ '\n'\
r'& & + 4 d - 5 e ' + '\n'\
r'\end{eqnarray}'
assert multiline_latex(f, expr, 3, environment="eqnarray", use_dots=True) == expected3dots
expected3align = r'\begin{align*}' + '\n'\
r'f = &- a + 2 b - 3 c \\'+ '\n'\
r'& + 4 d - 5 e ' + '\n'\
r'\end{align*}'
assert multiline_latex(f, expr, 3) == expected3align
assert multiline_latex(f, expr, 3, environment='align*') == expected3align
expected2ieee = r'\begin{IEEEeqnarray}{rCl}' + '\n'\
r'f & = &- a + 2 b \nonumber\\' + '\n'\
r'& & - 3 c + 4 d \nonumber\\' + '\n'\
r'& & - 5 e ' + '\n'\
r'\end{IEEEeqnarray}'
assert multiline_latex(f, expr, 2, environment="IEEEeqnarray") == expected2ieee
raises(ValueError, lambda: multiline_latex(f, expr, environment="foo"))
def test_issue_15353():
a, x = symbols('a x')
# Obtained from nonlinsolve([(sin(a*x)),cos(a*x)],[x,a])
sol = ConditionSet(
Tuple(x, a), Eq(sin(a*x), 0) & Eq(cos(a*x), 0), S.Complexes**2)
assert latex(sol) == \
r'\left\{\left( x, \ a\right)\; \middle|\; \left( x, \ a\right) \in ' \
r'\mathbb{C}^{2} \wedge \sin{\left(a x \right)} = 0 \wedge ' \
r'\cos{\left(a x \right)} = 0 \right\}'
def test_trace():
# Issue 15303
from sympy import trace
A = MatrixSymbol("A", 2, 2)
assert latex(trace(A)) == r"\operatorname{tr}\left(A \right)"
assert latex(trace(A**2)) == r"\operatorname{tr}\left(A^{2} \right)"
def test_print_basic():
# Issue 15303
from sympy import Basic, Expr
# dummy class for testing printing where the function is not
# implemented in latex.py
class UnimplementedExpr(Expr):
def __new__(cls, e):
return Basic.__new__(cls, e)
# dummy function for testing
def unimplemented_expr(expr):
return UnimplementedExpr(expr).doit()
# override class name to use superscript / subscript
def unimplemented_expr_sup_sub(expr):
result = UnimplementedExpr(expr)
result.__class__.__name__ = 'UnimplementedExpr_x^1'
return result
assert latex(unimplemented_expr(x)) == r'UnimplementedExpr\left(x\right)'
assert latex(unimplemented_expr(x**2)) == \
r'UnimplementedExpr\left(x^{2}\right)'
assert latex(unimplemented_expr_sup_sub(x)) == \
r'UnimplementedExpr^{1}_{x}\left(x\right)'
def test_MatrixSymbol_bold():
# Issue #15871
from sympy import trace
A = MatrixSymbol("A", 2, 2)
assert latex(trace(A), mat_symbol_style='bold') == \
r"\operatorname{tr}\left(\mathbf{A} \right)"
assert latex(trace(A), mat_symbol_style='plain') == \
r"\operatorname{tr}\left(A \right)"
A = MatrixSymbol("A", 3, 3)
B = MatrixSymbol("B", 3, 3)
C = MatrixSymbol("C", 3, 3)
assert latex(-A, mat_symbol_style='bold') == r"- \mathbf{A}"
assert latex(A - A*B - B, mat_symbol_style='bold') == \
r"\mathbf{A} - \mathbf{A} \mathbf{B} - \mathbf{B}"
assert latex(-A*B - A*B*C - B, mat_symbol_style='bold') == \
r"- \mathbf{A} \mathbf{B} - \mathbf{A} \mathbf{B} \mathbf{C} - \mathbf{B}"
A_k = MatrixSymbol("A_k", 3, 3)
assert latex(A_k, mat_symbol_style='bold') == r"\mathbf{A}_{k}"
A = MatrixSymbol(r"\nabla_k", 3, 3)
assert latex(A, mat_symbol_style='bold') == r"\mathbf{\nabla}_{k}"
def test_AppliedPermutation():
p = Permutation(0, 1, 2)
x = Symbol('x')
assert latex(AppliedPermutation(p, x)) == \
r'\sigma_{\left( 0\; 1\; 2\right)}(x)'
def test_PermutationMatrix():
p = Permutation(0, 1, 2)
assert latex(PermutationMatrix(p)) == r'P_{\left( 0\; 1\; 2\right)}'
p = Permutation(0, 3)(1, 2)
assert latex(PermutationMatrix(p)) == \
r'P_{\left( 0\; 3\right)\left( 1\; 2\right)}'
def test_imaginary_unit():
assert latex(1 + I) == r'1 + i'
assert latex(1 + I, imaginary_unit='i') == r'1 + i'
assert latex(1 + I, imaginary_unit='j') == r'1 + j'
assert latex(1 + I, imaginary_unit='foo') == r'1 + foo'
assert latex(I, imaginary_unit="ti") == r'\text{i}'
assert latex(I, imaginary_unit="tj") == r'\text{j}'
def test_text_re_im():
assert latex(im(x), gothic_re_im=True) == r'\Im{\left(x\right)}'
assert latex(im(x), gothic_re_im=False) == r'\operatorname{im}{\left(x\right)}'
assert latex(re(x), gothic_re_im=True) == r'\Re{\left(x\right)}'
assert latex(re(x), gothic_re_im=False) == r'\operatorname{re}{\left(x\right)}'
def test_latex_diffgeom():
from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarField, Differential
from sympy.diffgeom.rn import R2
x,y = symbols('x y', real=True)
m = Manifold('M', 2)
assert latex(m) == r'\text{M}'
p = Patch('P', m)
assert latex(p) == r'\text{P}_{\text{M}}'
rect = CoordSystem('rect', p, [x, y])
assert latex(rect) == r'\text{rect}^{\text{P}}_{\text{M}}'
b = BaseScalarField(rect, 0)
assert latex(b) == r'\mathbf{x}'
g = Function('g')
s_field = g(R2.x, R2.y)
assert latex(Differential(s_field)) == \
r'\operatorname{d}\left(g{\left(\mathbf{x},\mathbf{y} \right)}\right)'
def test_unit_printing():
assert latex(5*meter) == r'5 \text{m}'
assert latex(3*gibibyte) == r'3 \text{gibibyte}'
assert latex(4*microgram/second) == r'\frac{4 \mu\text{g}}{\text{s}}'
def test_issue_17092():
x_star = Symbol('x^*')
assert latex(Derivative(x_star, x_star,2)) == r'\frac{d^{2}}{d \left(x^{*}\right)^{2}} x^{*}'
def test_latex_decimal_separator():
x, y, z, t = symbols('x y z t')
k, m, n = symbols('k m n', integer=True)
f, g, h = symbols('f g h', cls=Function)
# comma decimal_separator
assert(latex([1, 2.3, 4.5], decimal_separator='comma') == r'\left[ 1; \ 2{,}3; \ 4{,}5\right]')
assert(latex(FiniteSet(1, 2.3, 4.5), decimal_separator='comma') == r'\left\{1; 2{,}3; 4{,}5\right\}')
assert(latex((1, 2.3, 4.6), decimal_separator = 'comma') == r'\left( 1; \ 2{,}3; \ 4{,}6\right)')
assert(latex((1,), decimal_separator='comma') == r'\left( 1;\right)')
# period decimal_separator
assert(latex([1, 2.3, 4.5], decimal_separator='period') == r'\left[ 1, \ 2.3, \ 4.5\right]' )
assert(latex(FiniteSet(1, 2.3, 4.5), decimal_separator='period') == r'\left\{1, 2.3, 4.5\right\}')
assert(latex((1, 2.3, 4.6), decimal_separator = 'period') == r'\left( 1, \ 2.3, \ 4.6\right)')
assert(latex((1,), decimal_separator='period') == r'\left( 1,\right)')
# default decimal_separator
assert(latex([1, 2.3, 4.5]) == r'\left[ 1, \ 2.3, \ 4.5\right]')
assert(latex(FiniteSet(1, 2.3, 4.5)) == r'\left\{1, 2.3, 4.5\right\}')
assert(latex((1, 2.3, 4.6)) == r'\left( 1, \ 2.3, \ 4.6\right)')
assert(latex((1,)) == r'\left( 1,\right)')
assert(latex(Mul(3.4,5.3), decimal_separator = 'comma') == r'18{,}02')
assert(latex(3.4*5.3, decimal_separator = 'comma') == r'18{,}02')
x = symbols('x')
y = symbols('y')
z = symbols('z')
assert(latex(x*5.3 + 2**y**3.4 + 4.5 + z, decimal_separator = 'comma') == r'2^{y^{3{,}4}} + 5{,}3 x + z + 4{,}5')
assert(latex(0.987, decimal_separator='comma') == r'0{,}987')
assert(latex(S(0.987), decimal_separator='comma') == r'0{,}987')
assert(latex(.3, decimal_separator='comma') == r'0{,}3')
assert(latex(S(.3), decimal_separator='comma') == r'0{,}3')
assert(latex(5.8*10**(-7), decimal_separator='comma') == r'5{,}8 \cdot 10^{-7}')
assert(latex(S(5.7)*10**(-7), decimal_separator='comma') == r'5{,}7 \cdot 10^{-7}')
assert(latex(S(5.7*10**(-7)), decimal_separator='comma') == r'5{,}7 \cdot 10^{-7}')
x = symbols('x')
assert(latex(1.2*x+3.4, decimal_separator='comma') == r'1{,}2 x + 3{,}4')
assert(latex(FiniteSet(1, 2.3, 4.5), decimal_separator='period') == r'\left\{1, 2.3, 4.5\right\}')
# Error Handling tests
raises(ValueError, lambda: latex([1,2.3,4.5], decimal_separator='non_existing_decimal_separator_in_list'))
raises(ValueError, lambda: latex(FiniteSet(1,2.3,4.5), decimal_separator='non_existing_decimal_separator_in_set'))
raises(ValueError, lambda: latex((1,2.3,4.5), decimal_separator='non_existing_decimal_separator_in_tuple'))
def test_Str():
from sympy.core.symbol import Str
assert str(Str('x')) == r'x'
def test_latex_escape():
assert latex_escape(r"~^\&%$#_{}") == "".join([
r'\textasciitilde',
r'\textasciicircum',
r'\textbackslash',
r'\&',
r'\%',
r'\$',
r'\#',
r'\_',
r'\{',
r'\}',
])
def test_emptyPrinter():
class MyObject:
def __repr__(self):
return "<MyObject with {...}>"
# unknown objects are monospaced
assert latex(MyObject()) == r"\mathtt{\text{<MyObject with \{...\}>}}"
# even if they are nested within other objects
assert latex((MyObject(),)) == r"\left( \mathtt{\text{<MyObject with \{...\}>}},\right)"
def test_global_settings():
import inspect
# settings should be visible in the signature of `latex`
assert inspect.signature(latex).parameters['imaginary_unit'].default == r'i'
assert latex(I) == r'i'
try:
# but changing the defaults...
LatexPrinter.set_global_settings(imaginary_unit='j')
# ... should change the signature
assert inspect.signature(latex).parameters['imaginary_unit'].default == r'j'
assert latex(I) == r'j'
finally:
# there's no public API to undo this, but we need to make sure we do
# so as not to impact other tests
del LatexPrinter._global_settings['imaginary_unit']
# check we really did undo it
assert inspect.signature(latex).parameters['imaginary_unit'].default == r'i'
assert latex(I) == r'i'
def test_pickleable():
# this tests that the _PrintFunction instance is pickleable
import pickle
assert pickle.loads(pickle.dumps(latex)) is latex
def test_printing_latex_array_expressions():
assert latex(ArraySymbol("A", 2, 3, 4)) == "A"
assert latex(ArrayElement("A", (2, 1/(1-x), 0))) == "{{A}_{2, \\frac{1}{1 - x}, 0}}"
41.799929 | 186 | 0.559275
dc9057963a90bad055d5587d41633ebdec41fcf6 | 80,958 | py | Python | psutil/tests/test_linux.py | alexfurtunato/psutil | 81ef4c9185650213c03122c95856ad3991b5656d | ["BSD-3-Clause"] | null | null | null
#!/usr/bin/env python
# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Linux specific tests."""
from __future__ import division
import collections
import contextlib
import errno
import glob
import io
import os
import re
import shutil
import socket
import struct
import tempfile
import textwrap
import time
import warnings
import psutil
from psutil import LINUX
from psutil._compat import basestring
from psutil._compat import PY3
from psutil._compat import u
from psutil.tests import call_until
from psutil.tests import HAS_BATTERY
from psutil.tests import HAS_CPU_FREQ
from psutil.tests import HAS_RLIMIT
from psutil.tests import MEMORY_TOLERANCE
from psutil.tests import mock
from psutil.tests import PYPY
from psutil.tests import pyrun
from psutil.tests import reap_children
from psutil.tests import reload_module
from psutil.tests import retry_before_failing
from psutil.tests import run_test_module_by_name
from psutil.tests import safe_rmpath
from psutil.tests import sh
from psutil.tests import skip_on_not_implemented
from psutil.tests import TESTFN
from psutil.tests import ThreadTask
from psutil.tests import TRAVIS
from psutil.tests import unittest
from psutil.tests import which
HERE = os.path.abspath(os.path.dirname(__file__))
SIOCGIFADDR = 0x8915
SIOCGIFCONF = 0x8912
SIOCGIFHWADDR = 0x8927
if LINUX:
SECTOR_SIZE = 512
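# /proc/diskstats reports I/O in 512-byte sectors regardless of the
# device's physical sector size, hence the fixed value above.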
# =====================================================================
# --- utils
# =====================================================================
def get_ipv4_address(ifname):
import fcntl
ifname = ifname[:15]
if PY3:
ifname = bytes(ifname, 'ascii')
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
with contextlib.closing(s):
return socket.inet_ntoa(
fcntl.ioctl(s.fileno(),
SIOCGIFADDR,
struct.pack('256s', ifname))[20:24])
def get_mac_address(ifname):
import fcntl
ifname = ifname[:15]
if PY3:
ifname = bytes(ifname, 'ascii')
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
with contextlib.closing(s):
info = fcntl.ioctl(
s.fileno(), SIOCGIFHWADDR, struct.pack('256s', ifname))
if PY3:
def ord(x):
return x
else:
import __builtin__
ord = __builtin__.ord
return ''.join(['%02x:' % ord(char) for char in info[18:24]])[:-1]
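# Minimal usage sketch for the two ioctl-based helpers above. This is an
# illustrative addition rather than part of the test suite proper; the
# default interface name "lo" is an assumption (common on Linux, but not
# guaranteed to exist).
def _demo_interface_addresses(ifname='lo'):
    # Cross-check the raw ioctl results against psutil.net_if_addrs().
    for addr in psutil.net_if_addrs().get(ifname, []):
        if addr.family == socket.AF_INET:
            assert addr.address == get_ipv4_address(ifname)
        elif addr.family == psutil.AF_LINK:
            assert addr.address == get_mac_address(ifname)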
def free_swap():
"""Parse 'free' cmd and return swap memory's s total, used and free
values.
"""
out = sh('free -b')
lines = out.split('\n')
for line in lines:
if line.startswith('Swap'):
_, total, used, free = line.split()
nt = collections.namedtuple('free', 'total used free')
return nt(int(total), int(used), int(free))
raise ValueError(
"can't find 'Swap' in 'free' output:\n%s" % '\n'.join(lines))
def free_physmem():
"""Parse 'free' cmd and return physical memory's total, used
and free values.
"""
# Note: free can have 2 different formats, invalidating 'shared'
# and 'cached' memory which may have different positions so we
# do not return them.
# https://github.com/giampaolo/psutil/issues/538#issuecomment-57059946
out = sh('free -b')
lines = out.split('\n')
for line in lines:
if line.startswith('Mem'):
total, used, free, shared = \
[int(x) for x in line.split()[1:5]]
nt = collections.namedtuple(
'free', 'total used free shared output')
return nt(total, used, free, shared, out)
raise ValueError(
"can't find 'Mem' in 'free' output:\n%s" % '\n'.join(lines))
def vmstat(stat):
out = sh("vmstat -s")
for line in out.split("\n"):
line = line.strip()
if stat in line:
return int(line.split(' ')[0])
raise ValueError("can't find %r in 'vmstat' output" % stat)
def get_free_version_info():
out = sh("free -V").strip()
return tuple(map(int, out.split()[-1].split('.')))
@contextlib.contextmanager
def mock_open_content(for_path, content):
"""Mock open() builtin and forces it to return a certain `content`
on read() if the path being opened matches `for_path`.
"""
def open_mock(name, *args, **kwargs):
if name == for_path:
if PY3:
if isinstance(content, basestring):
return io.StringIO(content)
else:
return io.BytesIO(content)
else:
return io.BytesIO(content)
else:
return orig_open(name, *args, **kwargs)
orig_open = open
patch_point = 'builtins.open' if PY3 else '__builtin__.open'
with mock.patch(patch_point, create=True, side_effect=open_mock) as m:
yield m
@contextlib.contextmanager
def mock_open_exception(for_path, exc):
"""Mock open() builtin and raises `exc` if the path being opened
matches `for_path`.
"""
def open_mock(name, *args, **kwargs):
if name == for_path:
raise exc
else:
return orig_open(name, *args, **kwargs)
orig_open = open
patch_point = 'builtins.open' if PY3 else '__builtin__.open'
with mock.patch(patch_point, create=True, side_effect=open_mock) as m:
yield m
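# Brief usage sketch for the two mocking helpers above (illustrative only;
# the fallback behaviour shown here is the same one the tests further down
# assert).
def _demo_proc_file_mocking():
    # Fake an empty /proc/meminfo: swap totals fall back on the sysinfo()
    # syscall instead of crashing.
    with mock_open_content('/proc/meminfo', b"") as m:
        psutil.swap_memory()
        assert m.called
    # Fake a missing /proc/vmstat: 'sin'/'sout' are reported as 0 and a
    # warning is emitted.
    with mock_open_exception(
            '/proc/vmstat',
            IOError(errno.ENOENT, 'no such file or directory')) as m:
        psutil.swap_memory()
        assert m.called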
# =====================================================================
# --- system virtual memory
# =====================================================================
@unittest.skipIf(not LINUX, "LINUX only")
class TestSystemVirtualMemory(unittest.TestCase):
def test_total(self):
# free_value = free_physmem().total
# psutil_value = psutil.virtual_memory().total
# self.assertEqual(free_value, psutil_value)
vmstat_value = vmstat('total memory') * 1024
psutil_value = psutil.virtual_memory().total
self.assertAlmostEqual(vmstat_value, psutil_value)
# Older versions of procps used slab memory to calculate used memory.
# This got changed in:
# https://gitlab.com/procps-ng/procps/commit/
# 05d751c4f076a2f0118b914c5e51cfbb4762ad8e
@unittest.skipIf(LINUX and get_free_version_info() < (3, 3, 12),
"old free version")
@retry_before_failing()
def test_used(self):
free = free_physmem()
free_value = free.used
psutil_value = psutil.virtual_memory().used
self.assertAlmostEqual(
free_value, psutil_value, delta=MEMORY_TOLERANCE,
msg='%s %s \n%s' % (free_value, psutil_value, free.output))
@unittest.skipIf(TRAVIS, "unreliable on TRAVIS")
@retry_before_failing()
def test_free(self):
# _, _, free_value, _ = free_physmem()
# psutil_value = psutil.virtual_memory().free
# self.assertAlmostEqual(
# free_value, psutil_value, delta=MEMORY_TOLERANCE)
vmstat_value = vmstat('free memory') * 1024
psutil_value = psutil.virtual_memory().free
self.assertAlmostEqual(
vmstat_value, psutil_value, delta=MEMORY_TOLERANCE)
@retry_before_failing()
def test_buffers(self):
vmstat_value = vmstat('buffer memory') * 1024
psutil_value = psutil.virtual_memory().buffers
self.assertAlmostEqual(
vmstat_value, psutil_value, delta=MEMORY_TOLERANCE)
# https://travis-ci.org/giampaolo/psutil/jobs/226719664
@unittest.skipIf(TRAVIS, "unreliable on TRAVIS")
@retry_before_failing()
def test_active(self):
vmstat_value = vmstat('active memory') * 1024
psutil_value = psutil.virtual_memory().active
self.assertAlmostEqual(
vmstat_value, psutil_value, delta=MEMORY_TOLERANCE)
# https://travis-ci.org/giampaolo/psutil/jobs/227242952
@unittest.skipIf(TRAVIS, "unreliable on TRAVIS")
@retry_before_failing()
def test_inactive(self):
vmstat_value = vmstat('inactive memory') * 1024
psutil_value = psutil.virtual_memory().inactive
self.assertAlmostEqual(
vmstat_value, psutil_value, delta=MEMORY_TOLERANCE)
@retry_before_failing()
def test_shared(self):
free = free_physmem()
free_value = free.shared
if free_value == 0:
raise unittest.SkipTest("free does not support 'shared' column")
psutil_value = psutil.virtual_memory().shared
self.assertAlmostEqual(
free_value, psutil_value, delta=MEMORY_TOLERANCE,
msg='%s %s \n%s' % (free_value, psutil_value, free.output))
@retry_before_failing()
def test_available(self):
# "free" output format has changed at some point:
# https://github.com/giampaolo/psutil/issues/538#issuecomment-147192098
out = sh("free -b")
lines = out.split('\n')
if 'available' not in lines[0]:
raise unittest.SkipTest("free does not support 'available' column")
else:
free_value = int(lines[1].split()[-1])
psutil_value = psutil.virtual_memory().available
self.assertAlmostEqual(
free_value, psutil_value, delta=MEMORY_TOLERANCE,
msg='%s %s \n%s' % (free_value, psutil_value, out))
def test_warnings_on_misses(self):
# Emulate a case where /proc/meminfo provides only a few fields.
# psutil is supposed to set the missing fields to 0 and
# raise a warning.
with mock_open_content(
'/proc/meminfo',
textwrap.dedent("""\
Active(anon): 6145416 kB
Active(file): 2950064 kB
Inactive(anon): 574764 kB
Inactive(file): 1567648 kB
MemAvailable: -1 kB
MemFree: 2057400 kB
MemTotal: 16325648 kB
SReclaimable: 346648 kB
""").encode()) as m:
with warnings.catch_warnings(record=True) as ws:
warnings.simplefilter("always")
ret = psutil.virtual_memory()
assert m.called
self.assertEqual(len(ws), 1)
w = ws[0]
assert w.filename.endswith('psutil/_pslinux.py')
self.assertIn(
"memory stats couldn't be determined", str(w.message))
self.assertIn("cached", str(w.message))
self.assertIn("shared", str(w.message))
self.assertIn("active", str(w.message))
self.assertIn("inactive", str(w.message))
self.assertIn("buffers", str(w.message))
self.assertIn("available", str(w.message))
self.assertEqual(ret.cached, 0)
self.assertEqual(ret.active, 0)
self.assertEqual(ret.inactive, 0)
self.assertEqual(ret.shared, 0)
self.assertEqual(ret.buffers, 0)
self.assertEqual(ret.available, 0)
self.assertEqual(ret.slab, 0)
def test_avail_old_percent(self):
# Make sure that our calculation of avail mem for old kernels
# is off by max 10%.
from psutil._pslinux import calculate_avail_vmem
from psutil._pslinux import open_binary
mems = {}
with open_binary('/proc/meminfo') as f:
for line in f:
fields = line.split()
mems[fields[0]] = int(fields[1]) * 1024
a = calculate_avail_vmem(mems)
if b'MemAvailable:' in mems:
b = mems[b'MemAvailable:']
diff_percent = abs(a - b) / a * 100
self.assertLess(diff_percent, 10)
def test_avail_old_comes_from_kernel(self):
# Make sure "MemAvailable:" coluimn is used instead of relying
# on our internal algorithm to calculate avail mem.
with mock_open_content(
'/proc/meminfo',
textwrap.dedent("""\
Active: 9444728 kB
Active(anon): 6145416 kB
Active(file): 2950064 kB
Buffers: 287952 kB
Cached: 4818144 kB
Inactive(file): 1578132 kB
Inactive(anon): 574764 kB
Inactive(file): 1567648 kB
MemAvailable: 6574984 kB
MemFree: 2057400 kB
MemTotal: 16325648 kB
Shmem: 577588 kB
SReclaimable: 346648 kB
""").encode()) as m:
with warnings.catch_warnings(record=True) as ws:
ret = psutil.virtual_memory()
assert m.called
self.assertEqual(ret.available, 6574984 * 1024)
w = ws[0]
self.assertIn(
"inactive memory stats couldn't be determined", str(w.message))
def test_avail_old_missing_fields(self):
# Remove Active(file), Inactive(file) and SReclaimable
# from /proc/meminfo and make sure the fallback is used
# (free + cached).
with mock_open_content(
"/proc/meminfo",
textwrap.dedent("""\
Active: 9444728 kB
Active(anon): 6145416 kB
Buffers: 287952 kB
Cached: 4818144 kB
Inactive(file): 1578132 kB
Inactive(anon): 574764 kB
MemFree: 2057400 kB
MemTotal: 16325648 kB
Shmem: 577588 kB
""").encode()) as m:
with warnings.catch_warnings(record=True) as ws:
ret = psutil.virtual_memory()
assert m.called
self.assertEqual(ret.available, 2057400 * 1024 + 4818144 * 1024)
w = ws[0]
self.assertIn(
"inactive memory stats couldn't be determined", str(w.message))
def test_avail_old_missing_zoneinfo(self):
# Remove /proc/zoneinfo file. Make sure fallback is used
# (free + cached).
with mock_open_content(
"/proc/meminfo",
textwrap.dedent("""\
Active: 9444728 kB
Active(anon): 6145416 kB
Active(file): 2950064 kB
Buffers: 287952 kB
Cached: 4818144 kB
Inactive(file): 1578132 kB
Inactive(anon): 574764 kB
Inactive(file): 1567648 kB
MemFree: 2057400 kB
MemTotal: 16325648 kB
Shmem: 577588 kB
SReclaimable: 346648 kB
""").encode()):
with mock_open_exception(
"/proc/zoneinfo",
IOError(errno.ENOENT, 'no such file or directory')):
with warnings.catch_warnings(record=True) as ws:
ret = psutil.virtual_memory()
self.assertEqual(
ret.available, 2057400 * 1024 + 4818144 * 1024)
w = ws[0]
self.assertIn(
"inactive memory stats couldn't be determined",
str(w.message))
def test_virtual_memory_mocked(self):
# Emulate /proc/meminfo because neither vmstat nor free return slab.
with mock_open_content(
'/proc/meminfo',
textwrap.dedent("""\
MemTotal: 100 kB
MemFree: 2 kB
MemAvailable: 3 kB
Buffers: 4 kB
Cached: 5 kB
SwapCached: 6 kB
Active: 7 kB
Inactive: 8 kB
Active(anon): 9 kB
Inactive(anon): 10 kB
Active(file): 11 kB
Inactive(file): 12 kB
Unevictable: 13 kB
Mlocked: 14 kB
SwapTotal: 15 kB
SwapFree: 16 kB
Dirty: 17 kB
Writeback: 18 kB
AnonPages: 19 kB
Mapped: 20 kB
Shmem: 21 kB
Slab: 22 kB
SReclaimable: 23 kB
SUnreclaim: 24 kB
KernelStack: 25 kB
PageTables: 26 kB
NFS_Unstable: 27 kB
Bounce: 28 kB
WritebackTmp: 29 kB
CommitLimit: 30 kB
Committed_AS: 31 kB
VmallocTotal: 32 kB
VmallocUsed: 33 kB
VmallocChunk: 34 kB
HardwareCorrupted: 35 kB
AnonHugePages: 36 kB
ShmemHugePages: 37 kB
ShmemPmdMapped: 38 kB
CmaTotal: 39 kB
CmaFree: 40 kB
HugePages_Total: 41 kB
HugePages_Free: 42 kB
HugePages_Rsvd: 43 kB
HugePages_Surp: 44 kB
Hugepagesize: 45 kB
DirectMap46k: 46 kB
DirectMap47M: 47 kB
DirectMap48G: 48 kB
""").encode()) as m:
mem = psutil.virtual_memory()
assert m.called
self.assertEqual(mem.total, 100 * 1024)
self.assertEqual(mem.free, 2 * 1024)
self.assertEqual(mem.buffers, 4 * 1024)
# cached mem also includes reclaimable memory
self.assertEqual(mem.cached, (5 + 23) * 1024)
self.assertEqual(mem.shared, 21 * 1024)
self.assertEqual(mem.active, 7 * 1024)
self.assertEqual(mem.inactive, 8 * 1024)
self.assertEqual(mem.slab, 22 * 1024)
self.assertEqual(mem.available, 3 * 1024)
# =====================================================================
# --- system swap memory
# =====================================================================
@unittest.skipIf(not LINUX, "LINUX only")
class TestSystemSwapMemory(unittest.TestCase):
@staticmethod
def meminfo_has_swap_info():
"""Return True if /proc/meminfo provides swap metrics."""
with open("/proc/meminfo") as f:
data = f.read()
return 'SwapTotal:' in data and 'SwapFree:' in data
def test_total(self):
free_value = free_swap().total
psutil_value = psutil.swap_memory().total
return self.assertAlmostEqual(
free_value, psutil_value, delta=MEMORY_TOLERANCE)
@retry_before_failing()
def test_used(self):
free_value = free_swap().used
psutil_value = psutil.swap_memory().used
return self.assertAlmostEqual(
free_value, psutil_value, delta=MEMORY_TOLERANCE)
@retry_before_failing()
def test_free(self):
free_value = free_swap().free
psutil_value = psutil.swap_memory().free
return self.assertAlmostEqual(
free_value, psutil_value, delta=MEMORY_TOLERANCE)
def test_missing_sin_sout(self):
with mock.patch('psutil._pslinux.open', create=True) as m:
with warnings.catch_warnings(record=True) as ws:
warnings.simplefilter("always")
ret = psutil.swap_memory()
assert m.called
self.assertEqual(len(ws), 1)
w = ws[0]
assert w.filename.endswith('psutil/_pslinux.py')
self.assertIn(
"'sin' and 'sout' swap memory stats couldn't "
"be determined", str(w.message))
self.assertEqual(ret.sin, 0)
self.assertEqual(ret.sout, 0)
def test_no_vmstat_mocked(self):
# see https://github.com/giampaolo/psutil/issues/722
with mock_open_exception(
"/proc/vmstat",
IOError(errno.ENOENT, 'no such file or directory')) as m:
with warnings.catch_warnings(record=True) as ws:
warnings.simplefilter("always")
ret = psutil.swap_memory()
assert m.called
self.assertEqual(len(ws), 1)
w = ws[0]
assert w.filename.endswith('psutil/_pslinux.py')
self.assertIn(
"'sin' and 'sout' swap memory stats couldn't "
"be determined and were set to 0",
str(w.message))
self.assertEqual(ret.sin, 0)
self.assertEqual(ret.sout, 0)
def test_meminfo_against_sysinfo(self):
# Make sure the content of /proc/meminfo about swap memory
# matches sysinfo() syscall, see:
# https://github.com/giampaolo/psutil/issues/1015
if not self.meminfo_has_swap_info():
return unittest.skip("/proc/meminfo has no swap metrics")
with mock.patch('psutil._pslinux.cext.linux_sysinfo') as m:
swap = psutil.swap_memory()
assert not m.called
import psutil._psutil_linux as cext
_, _, _, _, total, free, unit_multiplier = cext.linux_sysinfo()
total *= unit_multiplier
free *= unit_multiplier
self.assertEqual(swap.total, total)
self.assertEqual(swap.free, free)
def test_emulate_meminfo_has_no_metrics(self):
# Emulate a case where /proc/meminfo provides no swap metrics
# in which case sysinfo() syscall is supposed to be used
# as a fallback.
with mock_open_content("/proc/meminfo", b"") as m:
psutil.swap_memory()
assert m.called
# =====================================================================
# --- system CPU
# =====================================================================
@unittest.skipIf(not LINUX, "LINUX only")
class TestSystemCPU(unittest.TestCase):
@unittest.skipIf(TRAVIS, "unknown failure on travis")
def test_cpu_times(self):
fields = psutil.cpu_times()._fields
kernel_ver = re.findall(r'\d+\.\d+\.\d+', os.uname()[2])[0]
kernel_ver_info = tuple(map(int, kernel_ver.split('.')))
if kernel_ver_info >= (2, 6, 11):
self.assertIn('steal', fields)
else:
self.assertNotIn('steal', fields)
if kernel_ver_info >= (2, 6, 24):
self.assertIn('guest', fields)
else:
self.assertNotIn('guest', fields)
if kernel_ver_info >= (3, 2, 0):
self.assertIn('guest_nice', fields)
else:
self.assertNotIn('guest_nice', fields)
@unittest.skipIf(not os.path.exists("/sys/devices/system/cpu/online"),
"/sys/devices/system/cpu/online does not exist")
def test_cpu_count_logical_w_sysdev_cpu_online(self):
with open("/sys/devices/system/cpu/online") as f:
value = f.read().strip()
if "-" in str(value):
value = int(value.split('-')[1]) + 1
self.assertEqual(psutil.cpu_count(), value)
@unittest.skipIf(not os.path.exists("/sys/devices/system/cpu"),
"/sys/devices/system/cpu does not exist")
def test_cpu_count_logical_w_sysdev_cpu_num(self):
ls = os.listdir("/sys/devices/system/cpu")
count = len([x for x in ls if re.search(r"cpu\d+$", x) is not None])
self.assertEqual(psutil.cpu_count(), count)
@unittest.skipIf(not which("nproc"), "nproc utility not available")
def test_cpu_count_logical_w_nproc(self):
num = int(sh("nproc --all"))
self.assertEqual(psutil.cpu_count(logical=True), num)
@unittest.skipIf(not which("lscpu"), "lscpu utility not available")
def test_cpu_count_logical_w_lscpu(self):
out = sh("lscpu -p")
num = len([x for x in out.split('\n') if not x.startswith('#')])
self.assertEqual(psutil.cpu_count(logical=True), num)
def test_cpu_count_logical_mocked(self):
import psutil._pslinux
original = psutil._pslinux.cpu_count_logical()
# Here we want to mock os.sysconf("SC_NPROCESSORS_ONLN") in
# order to cause the parsing of /proc/cpuinfo and /proc/stat.
with mock.patch(
'psutil._pslinux.os.sysconf', side_effect=ValueError) as m:
self.assertEqual(psutil._pslinux.cpu_count_logical(), original)
assert m.called
# Let's have open() return empty data and make sure None is
# returned ('cause we mimic os.cpu_count()).
with mock.patch('psutil._pslinux.open', create=True) as m:
self.assertIsNone(psutil._pslinux.cpu_count_logical())
self.assertEqual(m.call_count, 2)
# /proc/stat should be the last one
self.assertEqual(m.call_args[0][0], '/proc/stat')
# Let's push this a bit further and make sure /proc/cpuinfo
# parsing works as expected.
with open('/proc/cpuinfo', 'rb') as f:
cpuinfo_data = f.read()
fake_file = io.BytesIO(cpuinfo_data)
with mock.patch('psutil._pslinux.open',
return_value=fake_file, create=True) as m:
self.assertEqual(psutil._pslinux.cpu_count_logical(), original)
# Finally, let's make /proc/cpuinfo return meaningless data;
# this way we'll fall back on relying on /proc/stat
with mock_open_content('/proc/cpuinfo', b"") as m:
self.assertEqual(psutil._pslinux.cpu_count_logical(), original)
assert m.called
def test_cpu_count_physical_mocked(self):
# Have open() return empty data and make sure None is returned
# ('cause we want to mimic os.cpu_count())
with mock.patch('psutil._pslinux.open', create=True) as m:
self.assertIsNone(psutil._pslinux.cpu_count_physical())
assert m.called
@unittest.skipIf(not HAS_CPU_FREQ, "not supported")
def test_cpu_freq_no_result(self):
with mock.patch("psutil._pslinux.glob.glob", return_value=[]):
self.assertIsNone(psutil.cpu_freq())
@unittest.skipIf(TRAVIS, "fails on Travis")
@unittest.skipIf(not HAS_CPU_FREQ, "not supported")
def test_cpu_freq_use_second_file(self):
# https://github.com/giampaolo/psutil/issues/981
def glob_mock(pattern):
if pattern.startswith("/sys/devices/system/cpu/cpufreq/policy"):
flags.append(None)
return []
else:
flags.append(None)
return orig_glob(pattern)
flags = []
orig_glob = glob.glob
with mock.patch("psutil._pslinux.glob.glob", side_effect=glob_mock,
create=True):
assert psutil.cpu_freq()
self.assertEqual(len(flags), 2)
@unittest.skipIf(not HAS_CPU_FREQ, "not supported")
def test_cpu_freq_emulate_data(self):
def open_mock(name, *args, **kwargs):
if name.endswith('/scaling_cur_freq'):
return io.BytesIO(b"500000")
elif name.endswith('/scaling_min_freq'):
return io.BytesIO(b"600000")
elif name.endswith('/scaling_max_freq'):
return io.BytesIO(b"700000")
else:
return orig_open(name, *args, **kwargs)
orig_open = open
patch_point = 'builtins.open' if PY3 else '__builtin__.open'
with mock.patch(patch_point, side_effect=open_mock):
with mock.patch(
'glob.glob',
return_value=['/sys/devices/system/cpu/cpufreq/policy0']):
freq = psutil.cpu_freq()
self.assertEqual(freq.current, 500.0)
self.assertEqual(freq.min, 600.0)
self.assertEqual(freq.max, 700.0)
@unittest.skipIf(not HAS_CPU_FREQ, "not supported")
def test_cpu_freq_emulate_multi_cpu(self):
def open_mock(name, *args, **kwargs):
if name.endswith('/scaling_cur_freq'):
return io.BytesIO(b"100000")
elif name.endswith('/scaling_min_freq'):
return io.BytesIO(b"200000")
elif name.endswith('/scaling_max_freq'):
return io.BytesIO(b"300000")
else:
return orig_open(name, *args, **kwargs)
orig_open = open
patch_point = 'builtins.open' if PY3 else '__builtin__.open'
policies = ['/sys/devices/system/cpu/cpufreq/policy0',
'/sys/devices/system/cpu/cpufreq/policy1',
'/sys/devices/system/cpu/cpufreq/policy2']
with mock.patch(patch_point, side_effect=open_mock):
with mock.patch('glob.glob', return_value=policies):
freq = psutil.cpu_freq()
self.assertEqual(freq.current, 100.0)
self.assertEqual(freq.min, 200.0)
self.assertEqual(freq.max, 300.0)
@unittest.skipIf(TRAVIS, "fails on Travis")
@unittest.skipIf(not HAS_CPU_FREQ, "not supported")
def test_cpu_freq_no_scaling_cur_freq_file(self):
# See: https://github.com/giampaolo/psutil/issues/1071
def open_mock(name, *args, **kwargs):
if name.endswith('/scaling_cur_freq'):
raise IOError(errno.ENOENT, "")
elif name.endswith('/cpuinfo_cur_freq'):
return io.BytesIO(b"200000")
else:
return orig_open(name, *args, **kwargs)
orig_open = open
patch_point = 'builtins.open' if PY3 else '__builtin__.open'
policies = ['/sys/devices/system/cpu/cpufreq/policy0',
'/sys/devices/system/cpu/cpufreq/policy1',
'/sys/devices/system/cpu/cpufreq/policy2']
with mock.patch(patch_point, side_effect=open_mock):
with mock.patch('glob.glob', return_value=policies):
freq = psutil.cpu_freq()
self.assertEqual(freq.current, 200)
# Also test that NotImplementedError is raised in case no
# current freq file is present.
def open_mock(name, *args, **kwargs):
if name.endswith('/scaling_cur_freq'):
raise IOError(errno.ENOENT, "")
elif name.endswith('/cpuinfo_cur_freq'):
raise IOError(errno.ENOENT, "")
else:
return orig_open(name, *args, **kwargs)
orig_open = open
patch_point = 'builtins.open' if PY3 else '__builtin__.open'
with mock.patch(patch_point, side_effect=open_mock):
with mock.patch('glob.glob', return_value=policies):
self.assertRaises(NotImplementedError, psutil.cpu_freq)
# =====================================================================
# --- system CPU stats
# =====================================================================
@unittest.skipIf(not LINUX, "LINUX only")
class TestSystemCPUStats(unittest.TestCase):
@unittest.skipIf(TRAVIS, "fails on Travis")
def test_ctx_switches(self):
vmstat_value = vmstat("context switches")
psutil_value = psutil.cpu_stats().ctx_switches
self.assertAlmostEqual(vmstat_value, psutil_value, delta=500)
@unittest.skipIf(TRAVIS, "fails on Travis")
def test_interrupts(self):
vmstat_value = vmstat("interrupts")
psutil_value = psutil.cpu_stats().interrupts
self.assertAlmostEqual(vmstat_value, psutil_value, delta=500)
# =====================================================================
# --- system network
# =====================================================================
@unittest.skipIf(not LINUX, "LINUX only")
class TestSystemNetwork(unittest.TestCase):
def test_net_if_addrs_ips(self):
for name, addrs in psutil.net_if_addrs().items():
for addr in addrs:
if addr.family == psutil.AF_LINK:
self.assertEqual(addr.address, get_mac_address(name))
elif addr.family == socket.AF_INET:
self.assertEqual(addr.address, get_ipv4_address(name))
# TODO: test for AF_INET6 family
def test_net_if_stats(self):
for name, stats in psutil.net_if_stats().items():
try:
out = sh("ifconfig %s" % name)
except RuntimeError:
pass
else:
# Not always reliable.
# self.assertEqual(stats.isup, 'RUNNING' in out, msg=out)
self.assertEqual(stats.mtu,
int(re.findall(r'(?i)MTU[: ](\d+)', out)[0]))
@retry_before_failing()
def test_net_io_counters(self):
def ifconfig(nic):
ret = {}
out = sh("ifconfig %s" % name)
ret['packets_recv'] = int(
re.findall(r'RX packets[: ](\d+)', out)[0])
ret['packets_sent'] = int(
re.findall(r'TX packets[: ](\d+)', out)[0])
ret['errin'] = int(re.findall(r'errors[: ](\d+)', out)[0])
ret['errout'] = int(re.findall(r'errors[: ](\d+)', out)[1])
ret['dropin'] = int(re.findall(r'dropped[: ](\d+)', out)[0])
ret['dropout'] = int(re.findall(r'dropped[: ](\d+)', out)[1])
ret['bytes_recv'] = int(
re.findall(r'RX (?:packets \d+ +)?bytes[: ](\d+)', out)[0])
ret['bytes_sent'] = int(
re.findall(r'TX (?:packets \d+ +)?bytes[: ](\d+)', out)[0])
return ret
nio = psutil.net_io_counters(pernic=True, nowrap=False)
for name, stats in nio.items():
try:
ifconfig_ret = ifconfig(name)
except RuntimeError:
continue
self.assertAlmostEqual(
stats.bytes_recv, ifconfig_ret['bytes_recv'], delta=1024 * 5)
self.assertAlmostEqual(
stats.bytes_sent, ifconfig_ret['bytes_sent'], delta=1024 * 5)
self.assertAlmostEqual(
stats.packets_recv, ifconfig_ret['packets_recv'], delta=1024)
self.assertAlmostEqual(
stats.packets_sent, ifconfig_ret['packets_sent'], delta=1024)
self.assertAlmostEqual(
stats.errin, ifconfig_ret['errin'], delta=10)
self.assertAlmostEqual(
stats.errout, ifconfig_ret['errout'], delta=10)
self.assertAlmostEqual(
stats.dropin, ifconfig_ret['dropin'], delta=10)
self.assertAlmostEqual(
stats.dropout, ifconfig_ret['dropout'], delta=10)
# XXX - not reliable when Docker has installed virtual NICs.
# @unittest.skipIf(not which('ip'), "'ip' utility not available")
# @unittest.skipIf(TRAVIS, "skipped on Travis")
# def test_net_if_names(self):
# out = sh("ip addr").strip()
# nics = [x for x in psutil.net_if_addrs().keys() if ':' not in x]
# found = 0
# for line in out.split('\n'):
# line = line.strip()
# if re.search(r"^\d+:", line):
# found += 1
# name = line.split(':')[1].strip()
# self.assertIn(name, nics)
# self.assertEqual(len(nics), found, msg="%s\n---\n%s" % (
# pprint.pformat(nics), out))
@mock.patch('psutil._pslinux.socket.inet_ntop', side_effect=ValueError)
@mock.patch('psutil._pslinux.supports_ipv6', return_value=False)
def test_net_connections_ipv6_unsupported(self, supports_ipv6, inet_ntop):
# see: https://github.com/giampaolo/psutil/issues/623
try:
s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
self.addCleanup(s.close)
s.bind(("::1", 0))
except socket.error:
pass
psutil.net_connections(kind='inet6')
def test_net_connections_mocked(self):
with mock_open_content(
'/proc/net/unix',
textwrap.dedent("""\
0: 00000003 000 000 0001 03 462170 @/tmp/dbus-Qw2hMPIU3n
0: 00000003 000 000 0001 03 35010 @/tmp/dbus-tB2X8h69BQ
0: 00000003 000 000 0001 03 34424 @/tmp/dbus-cHy80Y8O
000000000000000000000000000000000000000000000000000000
""")) as m:
psutil.net_connections(kind='unix')
assert m.called
# =====================================================================
# --- system disk
# =====================================================================
@unittest.skipIf(not LINUX, "LINUX only")
class TestSystemDisks(unittest.TestCase):
@unittest.skipIf(not hasattr(os, 'statvfs'), "os.statvfs() not available")
@skip_on_not_implemented()
def test_disk_partitions_and_usage(self):
# test psutil.disk_usage() and psutil.disk_partitions()
# against "df -a"
def df(path):
out = sh('df -P -B 1 "%s"' % path).strip()
lines = out.split('\n')
lines.pop(0)
line = lines.pop(0)
dev, total, used, free = line.split()[:4]
if dev == 'none':
dev = ''
total, used, free = int(total), int(used), int(free)
return dev, total, used, free
for part in psutil.disk_partitions(all=False):
usage = psutil.disk_usage(part.mountpoint)
dev, total, used, free = df(part.mountpoint)
self.assertEqual(usage.total, total)
# 10 MB tolerance
if abs(usage.free - free) > 10 * 1024 * 1024:
self.fail("psutil=%s, df=%s" % (usage.free, free))
if abs(usage.used - used) > 10 * 1024 * 1024:
self.fail("psutil=%s, df=%s" % (usage.used, used))
def test_disk_partitions_mocked(self):
# Test that ZFS partitions are returned.
with open("/proc/filesystems", "r") as f:
data = f.read()
if 'zfs' in data:
for part in psutil.disk_partitions():
if part.fstype == 'zfs':
break
else:
self.fail("couldn't find any ZFS partition")
else:
# No ZFS partitions on this system. Let's fake one.
fake_file = io.StringIO(u("nodev\tzfs\n"))
with mock.patch('psutil._pslinux.open',
return_value=fake_file, create=True) as m1:
with mock.patch(
'psutil._pslinux.cext.disk_partitions',
return_value=[('/dev/sdb3', '/', 'zfs', 'rw')]) as m2:
ret = psutil.disk_partitions()
assert m1.called
assert m2.called
assert ret
self.assertEqual(ret[0].fstype, 'zfs')
def test_disk_io_counters_kernel_2_4_mocked(self):
# Tests /proc/diskstats parsing format for 2.4 kernels, see:
# https://github.com/giampaolo/psutil/issues/767
with mock_open_content(
'/proc/partitions',
textwrap.dedent("""\
major minor #blocks name
8 0 488386584 hda
""")):
with mock_open_content(
'/proc/diskstats',
" 3 0 1 hda 2 3 4 5 6 7 8 9 10 11 12"):
ret = psutil.disk_io_counters(nowrap=False)
self.assertEqual(ret.read_count, 1)
self.assertEqual(ret.read_merged_count, 2)
self.assertEqual(ret.read_bytes, 3 * SECTOR_SIZE)
self.assertEqual(ret.read_time, 4)
self.assertEqual(ret.write_count, 5)
self.assertEqual(ret.write_merged_count, 6)
self.assertEqual(ret.write_bytes, 7 * SECTOR_SIZE)
self.assertEqual(ret.write_time, 8)
self.assertEqual(ret.busy_time, 10)
def test_disk_io_counters_kernel_2_6_full_mocked(self):
# Tests /proc/diskstats parsing format for 2.6 kernels,
# lines reporting all metrics:
# https://github.com/giampaolo/psutil/issues/767
with mock_open_content(
'/proc/partitions',
textwrap.dedent("""\
major minor #blocks name
8 0 488386584 hda
""")):
with mock_open_content(
'/proc/diskstats',
" 3 0 hda 1 2 3 4 5 6 7 8 9 10 11"):
ret = psutil.disk_io_counters(nowrap=False)
self.assertEqual(ret.read_count, 1)
self.assertEqual(ret.read_merged_count, 2)
self.assertEqual(ret.read_bytes, 3 * SECTOR_SIZE)
self.assertEqual(ret.read_time, 4)
self.assertEqual(ret.write_count, 5)
self.assertEqual(ret.write_merged_count, 6)
self.assertEqual(ret.write_bytes, 7 * SECTOR_SIZE)
self.assertEqual(ret.write_time, 8)
self.assertEqual(ret.busy_time, 10)
def test_disk_io_counters_kernel_2_6_limited_mocked(self):
# Tests /proc/diskstats parsing format for 2.6 kernels,
# where one line of /proc/partitions return a limited
# amount of metrics when it bumps into a partition
# (instead of a disk). See:
# https://github.com/giampaolo/psutil/issues/767
with mock_open_content(
'/proc/partitions',
textwrap.dedent("""\
major minor #blocks name
8 0 488386584 hda
""")):
with mock_open_content(
'/proc/diskstats',
" 3 1 hda 1 2 3 4"):
ret = psutil.disk_io_counters(nowrap=False)
self.assertEqual(ret.read_count, 1)
self.assertEqual(ret.read_bytes, 2 * SECTOR_SIZE)
self.assertEqual(ret.write_count, 3)
self.assertEqual(ret.write_bytes, 4 * SECTOR_SIZE)
self.assertEqual(ret.read_merged_count, 0)
self.assertEqual(ret.read_time, 0)
self.assertEqual(ret.write_merged_count, 0)
self.assertEqual(ret.write_time, 0)
self.assertEqual(ret.busy_time, 0)
# =====================================================================
# --- misc
# =====================================================================
@unittest.skipIf(not LINUX, "LINUX only")
class TestMisc(unittest.TestCase):
def test_boot_time(self):
vmstat_value = vmstat('boot time')
psutil_value = psutil.boot_time()
self.assertEqual(int(vmstat_value), int(psutil_value))
@mock.patch('psutil.traceback.print_exc')
def test_no_procfs_on_import(self, tb):
my_procfs = tempfile.mkdtemp()
with open(os.path.join(my_procfs, 'stat'), 'w') as f:
f.write('cpu 0 0 0 0 0 0 0 0 0 0\n')
f.write('cpu0 0 0 0 0 0 0 0 0 0 0\n')
f.write('cpu1 0 0 0 0 0 0 0 0 0 0\n')
try:
orig_open = open
def open_mock(name, *args, **kwargs):
if name.startswith('/proc'):
raise IOError(errno.ENOENT, 'rejecting access for test')
return orig_open(name, *args, **kwargs)
patch_point = 'builtins.open' if PY3 else '__builtin__.open'
with mock.patch(patch_point, side_effect=open_mock):
reload_module(psutil)
assert tb.called
self.assertRaises(IOError, psutil.cpu_times)
self.assertRaises(IOError, psutil.cpu_times, percpu=True)
self.assertRaises(IOError, psutil.cpu_percent)
self.assertRaises(IOError, psutil.cpu_percent, percpu=True)
self.assertRaises(IOError, psutil.cpu_times_percent)
self.assertRaises(
IOError, psutil.cpu_times_percent, percpu=True)
psutil.PROCFS_PATH = my_procfs
self.assertEqual(psutil.cpu_percent(), 0)
self.assertEqual(sum(psutil.cpu_times_percent()), 0)
# since we don't know the number of CPUs at import time,
# we awkwardly say there are none until the second call
per_cpu_percent = psutil.cpu_percent(percpu=True)
self.assertEqual(sum(per_cpu_percent), 0)
# ditto awkward length
per_cpu_times_percent = psutil.cpu_times_percent(percpu=True)
self.assertEqual(sum(map(sum, per_cpu_times_percent)), 0)
# much user, very busy
with open(os.path.join(my_procfs, 'stat'), 'w') as f:
f.write('cpu 1 0 0 0 0 0 0 0 0 0\n')
f.write('cpu0 1 0 0 0 0 0 0 0 0 0\n')
f.write('cpu1 1 0 0 0 0 0 0 0 0 0\n')
self.assertNotEqual(psutil.cpu_percent(), 0)
self.assertNotEqual(
sum(psutil.cpu_percent(percpu=True)), 0)
self.assertNotEqual(sum(psutil.cpu_times_percent()), 0)
self.assertNotEqual(
sum(map(sum, psutil.cpu_times_percent(percpu=True))), 0)
finally:
shutil.rmtree(my_procfs)
reload_module(psutil)
self.assertEqual(psutil.PROCFS_PATH, '/proc')
def test_cpu_steal_decrease(self):
# Test cumulative cpu stats decrease. We should ignore this.
# See issue #1210.
with mock_open_content(
"/proc/stat",
textwrap.dedent("""\
cpu 0 0 0 0 0 0 0 1 0 0
cpu0 0 0 0 0 0 0 0 1 0 0
cpu1 0 0 0 0 0 0 0 1 0 0
""").encode()) as m:
# first call to "percent" functions should read the new stat file
# and compare to the "real" file read at import time - so the
# values are meaningless
psutil.cpu_percent()
assert m.called
psutil.cpu_percent(percpu=True)
psutil.cpu_times_percent()
psutil.cpu_times_percent(percpu=True)
with mock_open_content(
"/proc/stat",
textwrap.dedent("""\
cpu 1 0 0 0 0 0 0 0 0 0
cpu0 1 0 0 0 0 0 0 0 0 0
cpu1 1 0 0 0 0 0 0 0 0 0
""").encode()) as m:
# Increase "user" while steal goes "backwards" to zero.
cpu_percent = psutil.cpu_percent()
assert m.called
cpu_percent_percpu = psutil.cpu_percent(percpu=True)
cpu_times_percent = psutil.cpu_times_percent()
cpu_times_percent_percpu = psutil.cpu_times_percent(percpu=True)
self.assertNotEqual(cpu_percent, 0)
self.assertNotEqual(sum(cpu_percent_percpu), 0)
self.assertNotEqual(sum(cpu_times_percent), 0)
self.assertNotEqual(sum(cpu_times_percent), 100.0)
self.assertNotEqual(sum(map(sum, cpu_times_percent_percpu)), 0)
self.assertNotEqual(sum(map(sum, cpu_times_percent_percpu)), 100.0)
self.assertEqual(cpu_times_percent.steal, 0)
self.assertNotEqual(cpu_times_percent.user, 0)
def test_boot_time_mocked(self):
with mock.patch('psutil._pslinux.open', create=True) as m:
self.assertRaises(
RuntimeError,
psutil._pslinux.boot_time)
assert m.called
def test_users_mocked(self):
# Make sure ':0' and ':0.0' (returned by C ext) are converted
# to 'localhost'.
with mock.patch('psutil._pslinux.cext.users',
return_value=[('giampaolo', 'pts/2', ':0',
1436573184.0, True, 2)]) as m:
self.assertEqual(psutil.users()[0].host, 'localhost')
assert m.called
with mock.patch('psutil._pslinux.cext.users',
return_value=[('giampaolo', 'pts/2', ':0.0',
1436573184.0, True, 2)]) as m:
self.assertEqual(psutil.users()[0].host, 'localhost')
assert m.called
# ...otherwise it should be returned as-is
with mock.patch('psutil._pslinux.cext.users',
return_value=[('giampaolo', 'pts/2', 'foo',
1436573184.0, True, 2)]) as m:
self.assertEqual(psutil.users()[0].host, 'foo')
assert m.called
def test_procfs_path(self):
tdir = tempfile.mkdtemp()
try:
psutil.PROCFS_PATH = tdir
self.assertRaises(IOError, psutil.virtual_memory)
self.assertRaises(IOError, psutil.cpu_times)
self.assertRaises(IOError, psutil.cpu_times, percpu=True)
self.assertRaises(IOError, psutil.boot_time)
# self.assertRaises(IOError, psutil.pids)
self.assertRaises(IOError, psutil.net_connections)
self.assertRaises(IOError, psutil.net_io_counters)
self.assertRaises(IOError, psutil.net_if_stats)
self.assertRaises(IOError, psutil.disk_io_counters)
self.assertRaises(IOError, psutil.disk_partitions)
self.assertRaises(psutil.NoSuchProcess, psutil.Process)
finally:
psutil.PROCFS_PATH = "/proc"
os.rmdir(tdir)
def test_sector_size_mock(self):
# Test SECTOR_SIZE fallback in case 'hw_sector_size' file
# does not exist.
def open_mock(name, *args, **kwargs):
if PY3 and isinstance(name, bytes):
name = name.decode()
if "hw_sector_size" in name:
flag.append(None)
raise IOError(errno.ENOENT, '')
else:
return orig_open(name, *args, **kwargs)
flag = []
orig_open = open
patch_point = 'builtins.open' if PY3 else '__builtin__.open'
with mock.patch(patch_point, side_effect=open_mock):
psutil.disk_io_counters()
assert flag
def test_issue_687(self):
# In case of thread ID:
# - pid_exists() is supposed to return False
# - Process(tid) is supposed to work
# - pids() should not return the TID
# See: https://github.com/giampaolo/psutil/issues/687
t = ThreadTask()
t.start()
try:
p = psutil.Process()
tid = p.threads()[1].id
assert not psutil.pid_exists(tid), tid
pt = psutil.Process(tid)
pt.as_dict()
self.assertNotIn(tid, psutil.pids())
finally:
t.stop()
def test_pid_exists_no_proc_status(self):
# Internally pid_exists relies on /proc/{pid}/status.
# Emulate a case where this file is empty in which case
# psutil is supposed to fall back on using pids().
with mock_open_content("/proc/%s/status", "") as m:
assert psutil.pid_exists(os.getpid())
assert m.called
# =====================================================================
# --- sensors
# =====================================================================
@unittest.skipIf(not LINUX, "LINUX only")
@unittest.skipIf(not HAS_BATTERY, "no battery")
class TestSensorsBattery(unittest.TestCase):
@unittest.skipIf(not which("acpi"), "acpi utility not available")
def test_percent(self):
out = sh("acpi -b")
acpi_value = int(out.split(",")[1].strip().replace('%', ''))
psutil_value = psutil.sensors_battery().percent
self.assertAlmostEqual(acpi_value, psutil_value, delta=1)
@unittest.skipIf(not which("acpi"), "acpi utility not available")
def test_power_plugged(self):
out = sh("acpi -b")
if 'unknown' in out.lower():
return unittest.skip("acpi output not reliable")
if 'discharging at zero rate' in out:
plugged = True
else:
plugged = "Charging" in out.split('\n')[0]
self.assertEqual(psutil.sensors_battery().power_plugged, plugged)
def test_emulate_power_plugged(self):
# Pretend the AC power cable is connected.
def open_mock(name, *args, **kwargs):
if name.endswith("AC0/online") or name.endswith("AC/online"):
return io.BytesIO(b"1")
else:
return orig_open(name, *args, **kwargs)
orig_open = open
patch_point = 'builtins.open' if PY3 else '__builtin__.open'
with mock.patch(patch_point, side_effect=open_mock) as m:
self.assertEqual(psutil.sensors_battery().power_plugged, True)
self.assertEqual(
psutil.sensors_battery().secsleft, psutil.POWER_TIME_UNLIMITED)
assert m.called
def test_emulate_power_plugged_2(self):
# Same as above but pretend /AC0/online does not exist in which
# case code relies on /status file.
def open_mock(name, *args, **kwargs):
if name.endswith("AC0/online") or name.endswith("AC/online"):
raise IOError(errno.ENOENT, "")
elif name.endswith("/status"):
return io.StringIO(u("charging"))
else:
return orig_open(name, *args, **kwargs)
orig_open = open
patch_point = 'builtins.open' if PY3 else '__builtin__.open'
with mock.patch(patch_point, side_effect=open_mock) as m:
self.assertEqual(psutil.sensors_battery().power_plugged, True)
assert m.called
def test_emulate_power_not_plugged(self):
# Pretend the AC power cable is not connected.
def open_mock(name, *args, **kwargs):
if name.endswith("AC0/online") or name.endswith("AC/online"):
return io.BytesIO(b"0")
else:
return orig_open(name, *args, **kwargs)
orig_open = open
patch_point = 'builtins.open' if PY3 else '__builtin__.open'
with mock.patch(patch_point, side_effect=open_mock) as m:
self.assertEqual(psutil.sensors_battery().power_plugged, False)
assert m.called
def test_emulate_power_not_plugged_2(self):
# Same as above but pretend /AC0/online does not exist in which
# case code relies on /status file.
def open_mock(name, *args, **kwargs):
if name.endswith("AC0/online") or name.endswith("AC/online"):
raise IOError(errno.ENOENT, "")
elif name.endswith("/status"):
return io.StringIO(u("discharging"))
else:
return orig_open(name, *args, **kwargs)
orig_open = open
patch_point = 'builtins.open' if PY3 else '__builtin__.open'
with mock.patch(patch_point, side_effect=open_mock) as m:
self.assertEqual(psutil.sensors_battery().power_plugged, False)
assert m.called
def test_emulate_power_undetermined(self):
# Pretend we can't know whether the AC power cable not
# connected (assert fallback to False).
def open_mock(name, *args, **kwargs):
if name.startswith("/sys/class/power_supply/AC0/online") or \
name.startswith("/sys/class/power_supply/AC/online"):
raise IOError(errno.ENOENT, "")
elif name.startswith("/sys/class/power_supply/BAT0/status"):
return io.BytesIO(b"???")
else:
return orig_open(name, *args, **kwargs)
orig_open = open
patch_point = 'builtins.open' if PY3 else '__builtin__.open'
with mock.patch(patch_point, side_effect=open_mock) as m:
self.assertIsNone(psutil.sensors_battery().power_plugged)
assert m.called
def test_emulate_no_base_files(self):
# Emulate a case where base metrics files are not present,
# in which case we're supposed to get None.
with mock_open_exception(
"/sys/class/power_supply/BAT0/energy_now",
IOError(errno.ENOENT, "")):
with mock_open_exception(
"/sys/class/power_supply/BAT0/charge_now",
IOError(errno.ENOENT, "")):
self.assertIsNone(psutil.sensors_battery())
def test_emulate_energy_full_0(self):
# Emulate a case where energy_full files returns 0.
with mock_open_content(
"/sys/class/power_supply/BAT0/energy_full", b"0") as m:
self.assertEqual(psutil.sensors_battery().percent, 0)
assert m.called
def test_emulate_energy_full_not_avail(self):
# Emulate a case where energy_full file does not exist.
# Expected fallback on /capacity.
with mock_open_exception(
"/sys/class/power_supply/BAT0/energy_full",
IOError(errno.ENOENT, "")):
with mock_open_exception(
"/sys/class/power_supply/BAT0/charge_full",
IOError(errno.ENOENT, "")):
with mock_open_content(
"/sys/class/power_supply/BAT0/capacity", b"88"):
self.assertEqual(psutil.sensors_battery().percent, 88)
def test_emulate_no_ac0_online(self):
# Emulate a case where /AC0/online file does not exist.
def path_exists_mock(name):
if name.startswith("/sys/class/power_supply/AC0/online"):
return False
else:
return orig_path_exists(name)
orig_path_exists = os.path.exists
with mock.patch("psutil._pslinux.os.path.exists",
side_effect=path_exists_mock) as m:
psutil.sensors_battery()
assert m.called
def test_emulate_no_power(self):
# Emulate a case where /AC0/online file nor /BAT0/status exist.
with mock_open_exception(
"/sys/class/power_supply/AC/online",
IOError(errno.ENOENT, "")):
with mock_open_exception(
"/sys/class/power_supply/AC0/online",
IOError(errno.ENOENT, "")):
with mock_open_exception(
"/sys/class/power_supply/BAT0/status",
IOError(errno.ENOENT, "")):
self.assertIsNone(psutil.sensors_battery().power_plugged)
@unittest.skipIf(not LINUX, "LINUX only")
class TestSensorsTemperatures(unittest.TestCase):
@unittest.skipIf(TRAVIS, "unreliable on TRAVIS")
def test_emulate_eio_error(self):
def open_mock(name, *args, **kwargs):
if name.endswith("_input"):
raise OSError(errno.EIO, "")
else:
return orig_open(name, *args, **kwargs)
orig_open = open
patch_point = 'builtins.open' if PY3 else '__builtin__.open'
with mock.patch(patch_point, side_effect=open_mock) as m:
with warnings.catch_warnings(record=True) as ws:
self.assertEqual(psutil.sensors_temperatures(), {})
assert m.called
self.assertIn("ignoring", str(ws[0].message))
def test_emulate_data(self):
def open_mock(name, *args, **kwargs):
if name.endswith('/name'):
return io.StringIO(u("name"))
elif name.endswith('/temp1_label'):
return io.StringIO(u("label"))
elif name.endswith('/temp1_input'):
return io.BytesIO(b"30000")
elif name.endswith('/temp1_max'):
return io.BytesIO(b"40000")
elif name.endswith('/temp1_crit'):
return io.BytesIO(b"50000")
else:
return orig_open(name, *args, **kwargs)
orig_open = open
patch_point = 'builtins.open' if PY3 else '__builtin__.open'
with mock.patch(patch_point, side_effect=open_mock):
with mock.patch('glob.glob',
return_value=['/sys/class/hwmon/hwmon0/temp1']):
temp = psutil.sensors_temperatures()['name'][0]
self.assertEqual(temp.label, 'label')
self.assertEqual(temp.current, 30.0)
self.assertEqual(temp.high, 40.0)
self.assertEqual(temp.critical, 50.0)
@unittest.skipIf(not LINUX, "LINUX only")
class TestSensorsFans(unittest.TestCase):
def test_emulate_data(self):
def open_mock(name, *args, **kwargs):
if name.endswith('/name'):
return io.StringIO(u("name"))
elif name.endswith('/fan1_label'):
return io.StringIO(u("label"))
elif name.endswith('/fan1_input'):
return io.StringIO(u("2000"))
else:
return orig_open(name, *args, **kwargs)
orig_open = open
patch_point = 'builtins.open' if PY3 else '__builtin__.open'
with mock.patch(patch_point, side_effect=open_mock):
with mock.patch('glob.glob',
return_value=['/sys/class/hwmon/hwmon2/fan1']):
fan = psutil.sensors_fans()['name'][0]
self.assertEqual(fan.label, 'label')
self.assertEqual(fan.current, 2000)
# =====================================================================
# --- test process
# =====================================================================
@unittest.skipIf(not LINUX, "LINUX only")
class TestProcess(unittest.TestCase):
def setUp(self):
safe_rmpath(TESTFN)
tearDown = setUp
def test_memory_full_info(self):
src = textwrap.dedent("""
import time
with open("%s", "w") as f:
time.sleep(10)
""" % TESTFN)
sproc = pyrun(src)
self.addCleanup(reap_children)
call_until(lambda: os.listdir('.'), "'%s' not in ret" % TESTFN)
p = psutil.Process(sproc.pid)
time.sleep(.1)
mem = p.memory_full_info()
maps = p.memory_maps(grouped=False)
self.assertAlmostEqual(
mem.uss, sum([x.private_dirty + x.private_clean for x in maps]),
delta=4096)
self.assertAlmostEqual(
mem.pss, sum([x.pss for x in maps]), delta=4096)
self.assertAlmostEqual(
mem.swap, sum([x.swap for x in maps]), delta=4096)
def test_memory_full_info_mocked(self):
# See: https://github.com/giampaolo/psutil/issues/1222
with mock_open_content(
"/proc/%s/smaps" % os.getpid(),
textwrap.dedent("""\
fffff0 r-xp 00000000 00:00 0 [vsyscall]
Size: 1 kB
Rss: 2 kB
Pss: 3 kB
Shared_Clean: 4 kB
Shared_Dirty: 5 kB
Private_Clean: 6 kB
Private_Dirty: 7 kB
Referenced: 8 kB
Anonymous: 9 kB
LazyFree: 10 kB
AnonHugePages: 11 kB
ShmemPmdMapped: 12 kB
Shared_Hugetlb: 13 kB
Private_Hugetlb: 14 kB
Swap: 15 kB
SwapPss: 16 kB
KernelPageSize: 17 kB
MMUPageSize: 18 kB
Locked: 19 kB
VmFlags: rd ex
""").encode()) as m:
p = psutil.Process()
mem = p.memory_full_info()
assert m.called
self.assertEqual(mem.uss, (6 + 7 + 14) * 1024)
self.assertEqual(mem.pss, 3 * 1024)
self.assertEqual(mem.swap, 15 * 1024)
# On PYPY file descriptors are not closed fast enough.
@unittest.skipIf(PYPY, "unreliable on PYPY")
def test_open_files_mode(self):
def get_test_file():
p = psutil.Process()
giveup_at = time.time() + 2
while True:
for file in p.open_files():
if file.path == os.path.abspath(TESTFN):
return file
elif time.time() > giveup_at:
break
raise RuntimeError("timeout looking for test file")
#
with open(TESTFN, "w"):
self.assertEqual(get_test_file().mode, "w")
with open(TESTFN, "r"):
self.assertEqual(get_test_file().mode, "r")
with open(TESTFN, "a"):
self.assertEqual(get_test_file().mode, "a")
#
with open(TESTFN, "r+"):
self.assertEqual(get_test_file().mode, "r+")
with open(TESTFN, "w+"):
self.assertEqual(get_test_file().mode, "r+")
with open(TESTFN, "a+"):
self.assertEqual(get_test_file().mode, "a+")
# note: "x" bit is not supported
if PY3:
safe_rmpath(TESTFN)
with open(TESTFN, "x"):
self.assertEqual(get_test_file().mode, "w")
safe_rmpath(TESTFN)
with open(TESTFN, "x+"):
self.assertEqual(get_test_file().mode, "r+")
def test_open_files_file_gone(self):
# simulates a file which gets deleted during open_files()
# execution
p = psutil.Process()
files = p.open_files()
with tempfile.NamedTemporaryFile():
# give the kernel some time to see the new file
call_until(p.open_files, "len(ret) != %i" % len(files))
with mock.patch('psutil._pslinux.os.readlink',
side_effect=OSError(errno.ENOENT, "")) as m:
files = p.open_files()
assert not files
assert m.called
# also simulate the case where os.readlink() returns EINVAL
# in which case psutil is supposed to 'continue'
with mock.patch('psutil._pslinux.os.readlink',
side_effect=OSError(errno.EINVAL, "")) as m:
self.assertEqual(p.open_files(), [])
assert m.called
def test_open_files_fd_gone(self):
# Simulate a case where /proc/{pid}/fdinfo/{fd} disappears
# while iterating through fds.
# https://travis-ci.org/giampaolo/psutil/jobs/225694530
p = psutil.Process()
files = p.open_files()
with tempfile.NamedTemporaryFile():
# give the kernel some time to see the new file
call_until(p.open_files, "len(ret) != %i" % len(files))
patch_point = 'builtins.open' if PY3 else '__builtin__.open'
with mock.patch(patch_point,
side_effect=IOError(errno.ENOENT, "")) as m:
files = p.open_files()
assert not files
assert m.called
# --- mocked tests
def test_terminal_mocked(self):
with mock.patch('psutil._pslinux._psposix.get_terminal_map',
return_value={}) as m:
self.assertIsNone(psutil._pslinux.Process(os.getpid()).terminal())
assert m.called
# TODO: re-enable this test.
# def test_num_ctx_switches_mocked(self):
# with mock.patch('psutil._pslinux.open', create=True) as m:
# self.assertRaises(
# NotImplementedError,
# psutil._pslinux.Process(os.getpid()).num_ctx_switches)
# assert m.called
def test_cmdline_mocked(self):
# see: https://github.com/giampaolo/psutil/issues/639
p = psutil.Process()
fake_file = io.StringIO(u('foo\x00bar\x00'))
with mock.patch('psutil._pslinux.open',
return_value=fake_file, create=True) as m:
self.assertEqual(p.cmdline(), ['foo', 'bar'])
assert m.called
fake_file = io.StringIO(u('foo\x00bar\x00\x00'))
with mock.patch('psutil._pslinux.open',
return_value=fake_file, create=True) as m:
self.assertEqual(p.cmdline(), ['foo', 'bar', ''])
assert m.called
def test_cmdline_spaces_mocked(self):
# see: https://github.com/giampaolo/psutil/issues/1179
p = psutil.Process()
fake_file = io.StringIO(u('foo bar '))
with mock.patch('psutil._pslinux.open',
return_value=fake_file, create=True) as m:
self.assertEqual(p.cmdline(), ['foo', 'bar'])
assert m.called
fake_file = io.StringIO(u('foo bar '))
with mock.patch('psutil._pslinux.open',
return_value=fake_file, create=True) as m:
self.assertEqual(p.cmdline(), ['foo', 'bar', ''])
assert m.called
def test_readlink_path_deleted_mocked(self):
with mock.patch('psutil._pslinux.os.readlink',
return_value='/home/foo (deleted)'):
self.assertEqual(psutil.Process().exe(), "/home/foo")
self.assertEqual(psutil.Process().cwd(), "/home/foo")
def test_threads_mocked(self):
# Test the case where os.listdir() returns a file (thread)
# which no longer exists by the time we open() it (race
# condition). threads() is supposed to ignore that instead
# of raising NSP.
def open_mock(name, *args, **kwargs):
if name.startswith('/proc/%s/task' % os.getpid()):
raise IOError(errno.ENOENT, "")
else:
return orig_open(name, *args, **kwargs)
orig_open = open
patch_point = 'builtins.open' if PY3 else '__builtin__.open'
with mock.patch(patch_point, side_effect=open_mock) as m:
ret = psutil.Process().threads()
assert m.called
self.assertEqual(ret, [])
# ...but if it bumps into something != ENOENT we want an
# exception.
def open_mock(name, *args, **kwargs):
if name.startswith('/proc/%s/task' % os.getpid()):
raise IOError(errno.EPERM, "")
else:
return orig_open(name, *args, **kwargs)
with mock.patch(patch_point, side_effect=open_mock):
self.assertRaises(psutil.AccessDenied, psutil.Process().threads)
def test_exe_mocked(self):
with mock.patch('psutil._pslinux.readlink',
side_effect=OSError(errno.ENOENT, "")) as m1:
with mock.patch('psutil.Process.cmdline',
side_effect=psutil.AccessDenied(0, "")) as m2:
# No such file error; might be raised also if /proc/pid/exe
# path actually exists for system processes with low pids
# (about 0-20). In this case psutil is supposed to return
# an empty string.
ret = psutil.Process().exe()
assert m1.called
assert m2.called
self.assertEqual(ret, "")
# ...but if /proc/pid no longer exist we're supposed to treat
# it as an alias for zombie process
with mock.patch('psutil._pslinux.os.path.lexists',
return_value=False):
self.assertRaises(
psutil.ZombieProcess, psutil.Process().exe)
def test_issue_1014(self):
# Emulates a case where smaps file does not exist. In this case
# wrap_exception decorator should not raise NoSuchProcess.
with mock_open_exception(
'/proc/%s/smaps' % os.getpid(),
IOError(errno.ENOENT, "")) as m:
p = psutil.Process()
with self.assertRaises(IOError) as err:
p.memory_maps()
self.assertEqual(err.exception.errno, errno.ENOENT)
assert m.called
@unittest.skipIf(not HAS_RLIMIT, "not supported")
def test_rlimit_zombie(self):
# Emulate a case where rlimit() raises ENOSYS, which may
# happen in case of zombie process:
# https://travis-ci.org/giampaolo/psutil/jobs/51368273
with mock.patch("psutil._pslinux.cext.linux_prlimit",
side_effect=OSError(errno.ENOSYS, "")) as m:
p = psutil.Process()
p.name()
with self.assertRaises(psutil.ZombieProcess) as exc:
p.rlimit(psutil.RLIMIT_NOFILE)
assert m.called
self.assertEqual(exc.exception.pid, p.pid)
self.assertEqual(exc.exception.name, p.name())
def test_cwd_zombie(self):
with mock.patch("psutil._pslinux.os.readlink",
side_effect=OSError(errno.ENOENT, "")) as m:
p = psutil.Process()
p.name()
with self.assertRaises(psutil.ZombieProcess) as exc:
p.cwd()
assert m.called
self.assertEqual(exc.exception.pid, p.pid)
self.assertEqual(exc.exception.name, p.name())
def test_stat_file_parsing(self):
from psutil._pslinux import CLOCK_TICKS
args = [
"0", # pid
"(cat)", # name
"Z", # status
"1", # ppid
"0", # pgrp
"0", # session
"0", # tty
"0", # tpgid
"0", # flags
"0", # minflt
"0", # cminflt
"0", # majflt
"0", # cmajflt
"2", # utime
"3", # stime
"4", # cutime
"5", # cstime
"0", # priority
"0", # nice
"0", # num_threads
"0", # itrealvalue
"6", # starttime
"0", # vsize
"0", # rss
"0", # rsslim
"0", # startcode
"0", # endcode
"0", # startstack
"0", # kstkesp
"0", # kstkeip
"0", # signal
"0", # blocked
"0", # sigignore
"0", # sigcatch
"0", # wchan
"0", # nswap
"0", # cnswap
"0", # exit_signal
"6", # processor
]
content = " ".join(args).encode()
with mock_open_content('/proc/%s/stat' % os.getpid(), content):
p = psutil.Process()
self.assertEqual(p.name(), 'cat')
self.assertEqual(p.status(), psutil.STATUS_ZOMBIE)
self.assertEqual(p.ppid(), 1)
self.assertEqual(
p.create_time(), 6 / CLOCK_TICKS + psutil.boot_time())
cpu = p.cpu_times()
self.assertEqual(cpu.user, 2 / CLOCK_TICKS)
self.assertEqual(cpu.system, 3 / CLOCK_TICKS)
self.assertEqual(cpu.children_user, 4 / CLOCK_TICKS)
self.assertEqual(cpu.children_system, 5 / CLOCK_TICKS)
self.assertEqual(p.cpu_num(), 6)
def test_status_file_parsing(self):
with mock_open_content(
'/proc/%s/status' % os.getpid(),
textwrap.dedent("""\
Uid:\t1000\t1001\t1002\t1003
Gid:\t1004\t1005\t1006\t1007
Threads:\t66
Cpus_allowed:\tf
Cpus_allowed_list:\t0-7
voluntary_ctxt_switches:\t12
nonvoluntary_ctxt_switches:\t13""").encode()):
p = psutil.Process()
self.assertEqual(p.num_ctx_switches().voluntary, 12)
self.assertEqual(p.num_ctx_switches().involuntary, 13)
self.assertEqual(p.num_threads(), 66)
uids = p.uids()
self.assertEqual(uids.real, 1000)
self.assertEqual(uids.effective, 1001)
self.assertEqual(uids.saved, 1002)
gids = p.gids()
self.assertEqual(gids.real, 1004)
self.assertEqual(gids.effective, 1005)
self.assertEqual(gids.saved, 1006)
self.assertEqual(p._proc._get_eligible_cpus(), list(range(0, 8)))
@unittest.skipIf(not LINUX, "LINUX only")
class TestProcessAgainstStatus(unittest.TestCase):
"""/proc/pid/stat and /proc/pid/status have many values in common.
Whenever possible, psutil uses /proc/pid/stat (it's faster).
For all those cases we check that the value found in
/proc/pid/stat (by psutil) matches the one found in
/proc/pid/status.
"""
@classmethod
def setUpClass(cls):
cls.proc = psutil.Process()
def read_status_file(self, linestart):
with psutil._psplatform.open_text(
'/proc/%s/status' % self.proc.pid) as f:
for line in f:
line = line.strip()
if line.startswith(linestart):
value = line.partition('\t')[2]
try:
return int(value)
except ValueError:
return value
raise ValueError("can't find %r" % linestart)
def test_name(self):
value = self.read_status_file("Name:")
self.assertEqual(self.proc.name(), value)
def test_status(self):
value = self.read_status_file("State:")
value = value[value.find('(') + 1:value.rfind(')')]
value = value.replace(' ', '-')
self.assertEqual(self.proc.status(), value)
def test_ppid(self):
value = self.read_status_file("PPid:")
self.assertEqual(self.proc.ppid(), value)
def test_num_threads(self):
value = self.read_status_file("Threads:")
self.assertEqual(self.proc.num_threads(), value)
def test_uids(self):
value = self.read_status_file("Uid:")
value = tuple(map(int, value.split()[1:4]))
self.assertEqual(self.proc.uids(), value)
def test_gids(self):
value = self.read_status_file("Gid:")
value = tuple(map(int, value.split()[1:4]))
self.assertEqual(self.proc.gids(), value)
@retry_before_failing()
def test_num_ctx_switches(self):
value = self.read_status_file("voluntary_ctxt_switches:")
self.assertEqual(self.proc.num_ctx_switches().voluntary, value)
value = self.read_status_file("nonvoluntary_ctxt_switches:")
self.assertEqual(self.proc.num_ctx_switches().involuntary, value)
def test_cpu_affinity(self):
value = self.read_status_file("Cpus_allowed_list:")
if '-' in str(value):
min_, max_ = map(int, value.split('-'))
self.assertEqual(
self.proc.cpu_affinity(), list(range(min_, max_ + 1)))
def test_cpu_affinity_eligible_cpus(self):
value = self.read_status_file("Cpus_allowed_list:")
with mock.patch("psutil._pslinux.per_cpu_times") as m:
self.proc._proc._get_eligible_cpus()
if '-' in str(value):
assert not m.called
else:
assert m.called
# =====================================================================
# --- test utils
# =====================================================================
@unittest.skipIf(not LINUX, "LINUX only")
class TestUtils(unittest.TestCase):
def test_open_text(self):
with psutil._psplatform.open_text(__file__) as f:
self.assertEqual(f.mode, 'rt')
def test_open_binary(self):
with psutil._psplatform.open_binary(__file__) as f:
self.assertEqual(f.mode, 'rb')
def test_readlink(self):
with mock.patch("os.readlink", return_value="foo (deleted)") as m:
self.assertEqual(psutil._psplatform.readlink("bar"), "foo")
assert m.called
def test_cat(self):
fname = os.path.abspath(TESTFN)
with open(fname, "wt") as f:
f.write("foo ")
self.assertEqual(psutil._psplatform.cat(TESTFN, binary=False), "foo")
self.assertEqual(psutil._psplatform.cat(TESTFN, binary=True), b"foo")
self.assertEqual(
psutil._psplatform.cat(TESTFN + '??', fallback="bar"), "bar")
if __name__ == '__main__':
run_test_module_by_name(__file__)
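# The tests above rely on helpers such as mock_open_content() and mock_open_exception(),
# which are defined elsewhere in psutil's test suite. A minimal sketch of the idea,
# assuming Python 3 and unittest.mock; the name and behaviour here are illustrative,
# not the actual psutil helpers:
import io
from contextlib import contextmanager
from unittest import mock

@contextmanager
def mock_open_content_sketch(for_path, content):
    """Patch builtins.open() so that reads of `for_path` return `content`."""
    orig_open = open

    def fake_open(name, *args, **kwargs):
        if name == for_path:
            # return a file-like object holding the canned content
            return io.BytesIO(content) if isinstance(content, bytes) else io.StringIO(content)
        return orig_open(name, *args, **kwargs)

    with mock.patch("builtins.open", create=True, side_effect=fake_open) as m:
        yield m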
| 40.929221
| 79
| 0.550261
|
d8bb778d5dcc2fcd12e69a8030c8d1be119f118c
| 2,326
|
py
|
Python
|
src/function.py
|
kylerlmy/pythonpractice
|
6bdd329ac9adfc98c1cc4c37cc8581adad6018ad
|
[
"MIT"
] | null | null | null |
src/function.py
|
kylerlmy/pythonpractice
|
6bdd329ac9adfc98c1cc4c37cc8581adad6018ad
|
[
"MIT"
] | null | null | null |
src/function.py
|
kylerlmy/pythonpractice
|
6bdd329ac9adfc98c1cc4c37cc8581adad6018ad
|
[
"MIT"
] | null | null | null |
#---------------------------------- Functions ----------------------------------
def say_hello():
#function body block start
print('hello world')
#function body block end
say_hello() #call function
say_hello() #call function again
#----------------- Function parameters -----------------
def print_max(a,b):
if a>b:
print('{} is maximum'.format(a))
elif a==b:
say_hello()
print('{} is equal to {}'.format(a,b))
else:
print('{} is maximum'.format(b) )
print_max(8,5)
#----------------- The global statement and local variables -----------------
x=50
def func():
global x
print('x is:',x)
x=2
print('Changed global x to',x)
func()
print('Value of global x is',x)
#----------------- Default parameter values -----------------
def say(message,times=1):
print(message * times)
say(1,5)
say('Hello')
say('Hello',5)
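# Added note (not from the original tutorial): default values are evaluated only once,
# when the def statement runs, so a mutable default such as a list is shared across calls.
def append_item(item, bucket=[]):      # anti-pattern: the same list is reused
    bucket.append(item)
    return bucket

print(append_item(1))   # [1]
print(append_item(2))   # [1, 2]  <- still the first list

def append_item_safe(item, bucket=None):
    if bucket is None:
        bucket = []
    bucket.append(item)
    return bucket

print(append_item_safe(1))  # [1]
print(append_item_safe(2))  # [2]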
#----------------- Keyword arguments (named arguments) -----------------
def func(a,b=5,c=10):
print('a is',a,'and b is',b,'and c is',c)
func(3,7)
func(25,c=24)
func(c=50,a=100)
#----------------- Variable-length arguments (*args and **kwargs) -------------------------
def total(a=5,*numbers,**phonebook):
print('a',a)
# iterate over every item in the tuple
for single_item in numbers:
print('single_item',single_item)
# iterate over every item in the dictionary
for first_part,second_part in phonebook.items():
print(first_part,second_part)
print(total(10,1,2,3,Jack=1123,John=2231,Inge=1560))
print(total(1,2,3,Jack=1123,John=2231,Inge=1560))
#-----------------------return--------------------
# A return statement without a value returns None. None is a special type in Python that represents "nothing".
# For example, every function implicitly ends with an implied "return None".
def maximum(x,y):
if(x>y):
return x
elif x==y:
return 'The numbers are equal'
else:
return y
print(maximum(2,3))
#--------- Return None ---------------------------
def some_function():
pass # the pass statement marks a statement block with no content
print(some_function()) # prints None
#-------------- Docstrings (DocString) ----------
# The string on the first logical line of a function is that function's documentation string (docstring).
# By convention a docstring is a multi-line string whose first line starts with a capital letter and ends with a period.
# The second line is blank, and any detailed explanation starts from the third line.
def print_max(x,y):
'''Print the maximum of the two numbers.
The two numbers must be of type int.'''
# convert the arguments to integers if possible
x=int(x)
y=int(y)
if x>y:
print(x,'is maximum')
else:
print(y,'is maximum')
print_max(3,5)
print(print_max.__doc__) # Python treats everything as an object, including functions; __doc__ is the function's docstring attribute
help(print_max) # show the function's documentation
| 19.546218
| 71
| 0.546862
|
d7ecacc9353dc3d9a786dec7e2914e00d88577e7
| 1,128
|
py
|
Python
|
wordgen/cli/__main__.py
|
snsinfu/web-wordgen
|
118b7e8ae59b9a314e52c88a0807dbb67cd69894
|
[
"MIT"
] | 1
|
2020-09-08T21:50:14.000Z
|
2020-09-08T21:50:14.000Z
|
wordgen/cli/__main__.py
|
snsinfu/web-wordgen
|
118b7e8ae59b9a314e52c88a0807dbb67cd69894
|
[
"MIT"
] | null | null | null |
wordgen/cli/__main__.py
|
snsinfu/web-wordgen
|
118b7e8ae59b9a314e52c88a0807dbb67cd69894
|
[
"MIT"
] | 1
|
2020-09-08T21:50:15.000Z
|
2020-09-08T21:50:15.000Z
|
import argparse
import signal
import sys
from .command import Train, Generate
def main():
signal.signal(signal.SIGINT, signal.SIG_DFL)
signal.signal(signal.SIGTERM, signal.SIG_DFL)
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
args = parse_args()
mode = args.pop("mode")
if mode == "train":
command = Train(source=sys.stdin, **args)
if mode == "generate":
command = Generate(**args)
command.run()
def parse_args():
parser = argparse.ArgumentParser(prog="wordgen")
sub = parser.add_subparsers(dest="mode", required=True)
train_parser = sub.add_parser("train")
train_parser.add_argument("--group", type=str)
train_parser.add_argument("--token-size", type=int)
train_parser.add_argument("output", type=str)
generate_parser = sub.add_parser("generate")
generate_parser.add_argument("--group", type=str)
generate_parser.add_argument("--count", type=int)
generate_parser.add_argument("--prefix", type=str)
generate_parser.add_argument("input", type=str)
return vars(parser.parse_args())
if __name__ == "__main__":
main()
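# Example invocations (hypothetical file names; given the module path this runs as
# `python -m wordgen.cli`):
#   python -m wordgen.cli train --group en --token-size 3 model.json < words.txt
#   python -m wordgen.cli generate --group en --count 10 model.json
# parse_args() returns a plain dict, e.g.
#   {"mode": "train", "group": "en", "token_size": 3, "output": "model.json"}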
| 25.636364
| 59
| 0.68883
|
9e3ff65f31860192666d67cfa6d30c254f22cdf4
| 3,618
|
py
|
Python
|
tests/superset_test_config.py
|
piyush-singhal/incubator-superset
|
cf4edf73a6ad1a61e7a65f05b6e4201db3a4b6e4
|
[
"Apache-2.0"
] | 2
|
2020-07-27T04:07:50.000Z
|
2020-10-13T19:49:10.000Z
|
tests/superset_test_config.py
|
piyush-singhal/incubator-superset
|
cf4edf73a6ad1a61e7a65f05b6e4201db3a4b6e4
|
[
"Apache-2.0"
] | 1
|
2021-02-23T16:45:47.000Z
|
2021-04-24T23:15:03.000Z
|
tests/superset_test_config.py
|
piyush-singhal/incubator-superset
|
cf4edf73a6ad1a61e7a65f05b6e4201db3a4b6e4
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# type: ignore
from copy import copy
from superset.config import *
from tests.superset_test_custom_template_processors import CustomPrestoTemplateProcessor
AUTH_USER_REGISTRATION_ROLE = "alpha"
SQLALCHEMY_DATABASE_URI = "sqlite:///" + os.path.join(DATA_DIR, "unittests.db")
DEBUG = True
SUPERSET_WEBSERVER_PORT = 8081
# Allowing SQLALCHEMY_DATABASE_URI and SQLALCHEMY_EXAMPLES_URI to be defined as an env vars for
# continuous integration
if "SUPERSET__SQLALCHEMY_DATABASE_URI" in os.environ:
SQLALCHEMY_DATABASE_URI = os.environ["SUPERSET__SQLALCHEMY_DATABASE_URI"]
SQLALCHEMY_EXAMPLES_URI = SQLALCHEMY_DATABASE_URI
if "SUPERSET__SQLALCHEMY_EXAMPLES_URI" in os.environ:
SQLALCHEMY_EXAMPLES_URI = os.environ["SUPERSET__SQLALCHEMY_EXAMPLES_URI"]
if "UPLOAD_FOLDER" in os.environ:
UPLOAD_FOLDER = os.environ["UPLOAD_FOLDER"]
if "sqlite" in SQLALCHEMY_DATABASE_URI:
logger.warning(
"SQLite Database support for metadata databases will be "
"removed in a future version of Superset."
)
# Speeding up the tests.
PRESTO_POLL_INTERVAL = 0.1
HIVE_POLL_INTERVAL = 0.1
SQL_MAX_ROW = 666
SQLLAB_CTAS_NO_LIMIT = True # SQL_MAX_ROW will not take effect for the CTA queries
FEATURE_FLAGS = {
**FEATURE_FLAGS,
"foo": "bar",
"KV_STORE": True,
"SHARE_QUERIES_VIA_KV_STORE": True,
"ENABLE_TEMPLATE_PROCESSING": True,
}
def GET_FEATURE_FLAGS_FUNC(ff):
ff_copy = copy(ff)
ff_copy["super"] = "set"
return ff_copy
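# Illustrative check (not part of the original config): the hook copies the incoming
# flag dict and injects a marker key, so tests can verify the hook was applied:
#   GET_FEATURE_FLAGS_FUNC({"KV_STORE": True}) == {"KV_STORE": True, "super": "set"}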
TESTING = True
WTF_CSRF_ENABLED = False
FAB_ROLES = {"TestRole": [["Security", "menu_access"], ["List Users", "menu_access"]]}
PUBLIC_ROLE_LIKE = "Gamma"
AUTH_ROLE_PUBLIC = "Public"
EMAIL_NOTIFICATIONS = False
ENABLE_ROW_LEVEL_SECURITY = True
ENABLE_REACT_CRUD_VIEWS = os.environ.get("ENABLE_REACT_CRUD_VIEWS", False)
CACHE_CONFIG = {"CACHE_TYPE": "simple"}
REDIS_HOST = os.environ.get("REDIS_HOST", "localhost")
REDIS_PORT = os.environ.get("REDIS_PORT", "6379")
REDIS_CELERY_DB = os.environ.get("REDIS_CELERY_DB", 2)
REDIS_RESULTS_DB = os.environ.get("REDIS_RESULTS_DB", 3)
REDIS_CACHE_DB = os.environ.get("REDIS_CACHE_DB", 4)
CACHE_CONFIG = {
"CACHE_TYPE": "redis",
"CACHE_DEFAULT_TIMEOUT": 60 * 60 * 24, # 1 day default (in secs)
"CACHE_KEY_PREFIX": "superset_cache",
"CACHE_REDIS_URL": f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_CACHE_DB}",
}
class CeleryConfig(object):
BROKER_URL = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_CELERY_DB}"
CELERY_IMPORTS = ("superset.sql_lab",)
CELERY_RESULT_BACKEND = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_RESULTS_DB}"
CELERY_ANNOTATIONS = {"sql_lab.add": {"rate_limit": "10/s"}}
CONCURRENCY = 1
CELERY_CONFIG = CeleryConfig
CUSTOM_TEMPLATE_PROCESSORS = {
CustomPrestoTemplateProcessor.engine: CustomPrestoTemplateProcessor
}
PRESERVE_CONTEXT_ON_EXCEPTION = False
| 32.890909
| 95
| 0.758983
|
c5651a9d524f758f01142f52d7ed41c5e0f241bb
| 282
|
py
|
Python
|
src/config.py
|
facundoic/Data-Science-Projects
|
a9af7546cdcbbef3c9fcad5ea8ffce1f16d1d059
|
[
"MIT"
] | null | null | null |
src/config.py
|
facundoic/Data-Science-Projects
|
a9af7546cdcbbef3c9fcad5ea8ffce1f16d1d059
|
[
"MIT"
] | null | null | null |
src/config.py
|
facundoic/Data-Science-Projects
|
a9af7546cdcbbef3c9fcad5ea8ffce1f16d1d059
|
[
"MIT"
] | null | null | null |
link = 'http://deis.msal.gov.ar:8097/pentaho/api/repos/%3Apublic%3Adeis%3AdefuncSegunGrupoEdadxPeriodoAgrupCausaMuerteProvResSexo.wcdf/generatedContent?userid=deis&password=deis2016'
driver_path = '/home/facundoic/Desktop/GitHub/Repositories/Data-Science-Projects/src/chromedriver'
| 94
| 182
| 0.851064
|
214a7e36ee63f0081c7c8f6bff9f48ce7db094ba
| 1,729
|
py
|
Python
|
packager/version/generate_version_string.py
|
ivanvgdev/shaka-packager
|
5bf8ad5ed5e6adeed63a264dd43ebb9d649f6621
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 4
|
2018-11-08T21:37:13.000Z
|
2019-03-18T18:56:51.000Z
|
packager/version/generate_version_string.py
|
ivanvgdev/shaka-packager
|
5bf8ad5ed5e6adeed63a264dd43ebb9d649f6621
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2022-02-10T12:40:20.000Z
|
2022-02-10T12:40:20.000Z
|
packager/version/generate_version_string.py
|
aminyazdanpanah/shaka-packager
|
ea2b192b92ce24c68e48d9f29f7aa176e40ef875
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 3
|
2018-10-26T20:40:52.000Z
|
2020-04-01T12:05:42.000Z
|
#!/usr/bin/python
#
# Copyright 2015 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""This script is used to generate version string for packager."""
import subprocess
# To support python version before 2.7, which does not have
# subprocess.check_output.
if 'check_output' not in dir(subprocess):
def check_output_implementation(*popenargs, **kwargs):
"""Implement check_output if it is not available."""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be overridden.')
process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get('args')
if cmd is None:
cmd = popenargs[0]
raise subprocess.CalledProcessError(retcode, cmd)
return output
subprocess.check_output = check_output_implementation
if __name__ == '__main__':
try:
version_tag = subprocess.check_output(
['git', 'tag', '--points-at', 'HEAD'],
stderr=subprocess.STDOUT).rstrip()
except subprocess.CalledProcessError as e:
# git tag --points-at is not supported in old versions of git. Just ignore
# version_tag in this case.
version_tag = None
try:
version_hash = subprocess.check_output(
['git', 'rev-parse', '--short', 'HEAD'],
stderr=subprocess.STDOUT).rstrip()
except subprocess.CalledProcessError as e:
version_hash = 'unknown-version'
if version_tag:
print '{0}-{1}'.format(version_tag, version_hash)
else:
print version_hash
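# The script above targets Python 2 (note the print statements). A rough Python 3
# sketch of the same logic, for illustration only (not part of the original file):
import subprocess

def generate_version_string():
    def git(*cmd):
        return subprocess.check_output(
            ("git",) + cmd, stderr=subprocess.STDOUT).decode().strip()
    try:
        tag = git("tag", "--points-at", "HEAD")
    except subprocess.CalledProcessError:
        tag = ""  # old git versions do not support --points-at
    try:
        sha = git("rev-parse", "--short", "HEAD")
    except subprocess.CalledProcessError:
        sha = "unknown-version"
    return "{0}-{1}".format(tag, sha) if tag else sha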
| 32.622642
| 78
| 0.699248
|
6dba7235559b527ae6144e9183f3168a214b5e1e
| 775
|
py
|
Python
|
var/spack/repos/builtin/packages/memaxes/package.py
|
kkauder/spack
|
6ae8d5c380c1f42094b05d38be26b03650aafb39
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2,360
|
2017-11-06T08:47:01.000Z
|
2022-03-31T14:45:33.000Z
|
var/spack/repos/builtin/packages/memaxes/package.py
|
kkauder/spack
|
6ae8d5c380c1f42094b05d38be26b03650aafb39
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 13,838
|
2017-11-04T07:49:45.000Z
|
2022-03-31T23:38:39.000Z
|
var/spack/repos/builtin/packages/memaxes/package.py
|
kkauder/spack
|
6ae8d5c380c1f42094b05d38be26b03650aafb39
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 1,793
|
2017-11-04T07:45:50.000Z
|
2022-03-30T14:31:53.000Z
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Memaxes(Package):
"""MemAxes is a visualizer for sampled memory trace data."""
homepage = "https://github.com/llnl/MemAxes"
version('0.5', sha256='9858f0f675b50e347d0b88545558e5d6b4333347c762b15d399b8d8004d7b68b',
url='https://github.com/llnl/MemAxes/archive/v0.5.tar.gz')
depends_on('cmake@2.8.9:', type='build')
depends_on("qt@5:")
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
cmake('..', *std_cmake_args)
make()
make("install")
| 31
| 93
| 0.672258
|
a3e257ead45310e20d610bc606baca00f7fbf48a
| 8,008
|
py
|
Python
|
temp_old/20201231/lamp_setup_pair_pipette.py
|
yangqinwhu/opentron
|
653db54d5f8461ebbfd58818fee5f055f61dc7eb
|
[
"MIT"
] | null | null | null |
temp_old/20201231/lamp_setup_pair_pipette.py
|
yangqinwhu/opentron
|
653db54d5f8461ebbfd58818fee5f055f61dc7eb
|
[
"MIT"
] | null | null | null |
temp_old/20201231/lamp_setup_pair_pipette.py
|
yangqinwhu/opentron
|
653db54d5f8461ebbfd58818fee5f055f61dc7eb
|
[
"MIT"
] | 1
|
2021-06-04T05:21:59.000Z
|
2021-06-04T05:21:59.000Z
|
"""Use lamp_setup_app.py to calibrate all labware first"""
from opentrons import protocol_api
import opentrons.execute # This returns the same kind of object - a ProtocolContext - that is passed into your protocol’s run function when you upload your protocol in the Opentrons App
import json,timeit,time
def heating(tm_deck,temp = 95, heat_time = 5):
"""Set the temperature to temp, then heat for time (5) minutes
lower the temperature to 25C and deactivate the temp deck"""
start = timeit.default_timer()
tm_deck.set_temperature(temp)
ramp_time = timeit.default_timer() - start
time.sleep(heat_time*60)
tm_deck.set_temperature(25)
tm_deck.deactivate()
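# Hypothetical usage (heating() is defined but never called in the active protocol below):
#   heating(tm_deck, temp=95, heat_time=5)   # hold 95 C for 5 minutes, then cool to 25 C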
def multi_transfer(s,d, b = 0,samp_vol= 100,air_vol = 25, buffer_vol = 0,simulate = False):
""" buffer_well
s: source well
d: destination well
Transfer from source well: s to destination well"""
#print ("Transfering saliva samples in rack {} column {}:".format(1,2))
multi_pipette.flow_rate.aspirate = 120
multi_pipette.flow_rate.dispense = 120
start = timeit.default_timer()
total_vol = samp_vol+air_vol+buffer_vol
multi_pipette.pick_up_tip(presses=tip_presses, increment=tip_press_increment)
if buffer_vol !=0:
multi_pipette.aspirate(buffer_vol, location = b.bottom(2))
multi_pipette.air_gap(air_vol)
total_vol +=air_vol
multi_pipette.aspirate(samp_vol, s.bottom(10))
multi_pipette.air_gap(air_vol)
multi_pipette.dispense(total_vol, d.bottom(5))
multi_pipette.flow_rate.dispense = 20
multi_pipette.mix(1,int(total_vol/2))
multi_pipette.air_gap(air_vol)
stop = timeit.default_timer()
if simulate:
multi_pipette.return_tip()
else:
multi_pipette.drop_tip()
stop1 = timeit.default_timer()
run_time = stop1 - start
dest_well = d
return run_time,dest_well,stop
protocol = opentrons.execute.get_protocol_api('2.7')
LABWARE_DEF_JSON = """{"ordering":[["A1","B1","C1","D1"],["A2","B2","C2","D2"],["A3","B3","C3","D3"],["A4","B4","C4","D4"],["A5","B5","C5","D5"],["A6","B6","C6","D6"]],"brand":{"brand":"ams2401","brandId":[]},"metadata":{"displayName":"ams2401 5ml rack","displayCategory":"wellPlate","displayVolumeUnits":"µL","tags":[]},"dimensions":{"xDimension":127.76,"yDimension":85.47,"zDimension":72},"wells":{"A1":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":18.38,"y":69.73,"z":22},"B1":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":18.38,"y":51.83,"z":22},"C1":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":18.38,"y":33.93,"z":22},"D1":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":18.38,"y":16.03,"z":22},"A2":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":36.28,"y":69.73,"z":22},"B2":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":36.28,"y":51.83,"z":22},"C2":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":36.28,"y":33.93,"z":22},"D2":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":36.28,"y":16.03,"z":22},"A3":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":54.18,"y":69.73,"z":22},"B3":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":54.18,"y":51.83,"z":22},"C3":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":54.18,"y":33.93,"z":22},"D3":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":54.18,"y":16.03,"z":22},"A4":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":72.08,"y":69.73,"z":22},"B4":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":72.08,"y":51.83,"z":22},"C4":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":72.08,"y":33.93,"z":22},"D4":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":72.08,"y":16.03,"z":22},"A5":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":89.98,"y":69.73,"z":22},"B5":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":89.98,"y":51.83,"z":22},"C5":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":89.98,"y":33.93,"z":22},"D5":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":89.98,"y":16.03,"z":22},"A6":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":107.88,"y":69.73,"z":22},"B6":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":107.88,"y":51.83,"z":22},"C6":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":107.88,"y":33.93,"z":22},"D6":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":107.88,"y":16.03,"z":22}},"groups":[{"metadata":{"wellBottomShape":"v"},"wells":["A1","B1","C1","D1","A2","B2","C2","D2","A3","B3","C3","D3","A4","B4","C4","D4","A5","B5","C5","D5","A6","B6","C6","D6"]}],"parameters":{"format":"irregular","quirks":[],"isTiprack":false,"isMagneticModuleCompatible":false,"loadName":"ams2401_24_wellplate_5000ul"},"namespace":"custom_beta","version":1,"schemaVersion":2,"cornerOffsetFromSlot":{"x":0,"y":0,"z":0}}"""
saliva_rack = json.loads(LABWARE_DEF_JSON)
LABWARE_LABEL = saliva_rack.get('metadata', {}).get(
'displayName', 'test labware')
metadata = {
'protocolName': 'Saliva to DTT',
'apiLevel': '2.5'
}
samp_vol = 100
buffer_vol = int(samp_vol/4)
air_vol = 20
total_vol = samp_vol+buffer_vol+air_vol
samples = 48
# load labware and pipettes
p200_tip_name = "opentrons_96_filtertiprack_200ul"
p200_tip_slots = ["1","11"]
p10_tip_name = "opentrons_96_filtertiprack_20ul"
p10_tip_slots = ["3"]
right_pip_name = "p300_multi"
left_pip_name = "p300_multi"
plate_name = 'nest_96_wellplate_100ul_pcr_full_skirt'
plate_slot ="7"
lampMM_plate_slot = '8'
dtt_slot = "2"
rack_name = saliva_rack
rack_slots = ["5","6"]
temp_module_slot = '9'
p200_tips = [protocol.load_labware(p200_tip_name, slot) for slot in p200_tip_slots]
p10_tips = [protocol.load_labware(p10_tip_name, slot) for slot in p10_tip_slots]
# single_pipette = protocol.load_instrument(right_pip_name, 'right', tip_racks=p10_tips)
multi_pipette_2 = protocol.load_instrument(right_pip_name, 'right', tip_racks=p200_tips)
multi_pipette_1 = protocol.load_instrument(left_pip_name, 'left', tip_racks=p200_tips)
multi_pipette = multi_pipette_1
src_racks = [protocol.load_labware_from_definition(rack_name,slot) for slot in rack_slots]
src_tubes = src_racks[0].rows()[0]+src_racks[1].rows()[0]
dtt_plate = protocol.load_labware(plate_name, dtt_slot)
lampMM_plate = protocol.load_labware(plate_name, lampMM_plate_slot)
tm_deck = protocol.load_module('Temperature Module', temp_module_slot)
tm_plate = tm_deck.load_labware(plate_name) # needed below: dest_plate = tm_plate
multi_pipette.flow_rate.aspirate = 120
multi_pipette.flow_rate.dispense = 120
tip_press_increment=0.4
tip_presses = 1
start_all = timeit.default_timer()
samples = 48
sample_c = int((samples-1)/4)+1
wells = []
incubation_start_times =[]
dest_plate = tm_plate
for s, d,b in zip(src_tubes[:sample_c],dest_plate.rows()[0][:sample_c],dtt_plate.rows()[0][:sample_c]):
run_time,well,incubation_start_time = multi_transfer(s,d,b,buffer_vol=25)
wells.append(well)
incubation_start_times.append(incubation_start_time)
print ("transfer time is {} second".format(run_time))
# for s,d,t in zip(wells,lampMM_plate.rows()[0][:len(wells)],incubation_start_times):
# start = timeit.default_timer()
# t0 = start-t
# print ("Sample already on hot plate for {} minutes.".format(t0/60))
# if t0 >300:
# print ("Sample already on hot plate for {} minutes.".format(t0/60))
# else:
# time.sleep(300-t0)
# multi_transfer(s,d)
stop = timeit.default_timer()
run_time = stop -start_all
print ('Total run time :', run_time/60)
| 59.318519
| 3,347
| 0.692308
|
69afe0f32666652b9b51f1bb3656dbc5102875b0
| 1,872
|
py
|
Python
|
nature/bricks/graph/_task.py
|
bionicles/neuromax
|
a53a17a1c033c11ac607a9e28f43b1f906e58aad
|
[
"MIT"
] | null | null | null |
nature/bricks/graph/_task.py
|
bionicles/neuromax
|
a53a17a1c033c11ac607a9e28f43b1f906e58aad
|
[
"MIT"
] | null | null | null |
nature/bricks/graph/_task.py
|
bionicles/neuromax
|
a53a17a1c033c11ac607a9e28f43b1f906e58aad
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
import networkx as nx
from nature import add_node, get_output, screenshot_graph
from tools import plot_model, log
K, L = tf.keras, tf.keras.layers
MIN_STACKS, MAX_STACKS = 0, 8
def TaskGraph(AI, in_specs, out_specs):
G = nx.MultiDiGraph()
add_node(G, "critic", "red", "triangle", "critic", spec=AI.loss_spec)
add_node(G, "source", "gold", "cylinder", "source")
add_node(G, "m_0", "black", "circle", "merge")
add_node(G, "m_1", "black", "circle", "merge")
add_node(G, "m_2", "black", "circle", "merge")
add_node(G, "sink", "gold", "cylinder", "sink")
G.add_edges_from([('m_0', 'm_1'), ('m_1', 'm_2'), ('m_2', 'critic')])
n_stacks = AI.pull("n_stacks", MIN_STACKS, MAX_STACKS)
for i in range(n_stacks):
add_node(G, i, "black", "square", "brick")
G.add_edges_from([("m_0", i), (i, 'm_1')])
for n, in_spec in enumerate(in_specs):
in_key = f"input_{n}"
add_node(G, in_key, "blue", "circle", "input", spec=in_spec, n=n)
G.add_edge("source", in_key)
G.add_edge(in_key, "m_0")
for n, out_spec in enumerate(out_specs):
out_key = f"output_{n}"
add_node(G, out_key, "red", "triangle", "output", spec=out_spec, n=n)
G.add_edges_from([
('m_1', out_key), (out_key, "m_2"), (out_key, 'sink')])
G.add_edge("critic", "sink")
return G
def Model(G, AI):
outputs = [get_output(G, AI, i) for i in list(G.predecessors("sink"))]
inputs = [G.node[i]['input'] for i in list(G.successors('source'))]
log('outputs', outputs, color="green", debug=True)
return K.Model(inputs, outputs)
def TaskModel(AI, in_specs, out_specs):
G = TaskGraph(AI, in_specs, out_specs)
# screenshot_graph(G, f'graph_{AI.hp.number}')
model = Model(G, AI)
# plot_model(model, f"model_{AI.hp.number}")
return G, model
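# Illustrative usage (AI, in_specs and out_specs come from the surrounding neuromax code
# base and are not defined in this file):
#   G, model = TaskModel(AI, in_specs, out_specs)
#   model.summary()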
| 37.44
| 77
| 0.615919
|
29007449f8cb2d48f3ef3710be74033753056d1b
| 37,325
|
py
|
Python
|
tests/test_tf.py
|
AkashNarayanan/pennylane-sf
|
f2dbd0c2d7346effcbd0b82793f4dd7fc6de2a8f
|
[
"Apache-2.0"
] | 14
|
2018-11-13T15:40:08.000Z
|
2020-07-18T02:33:18.000Z
|
tests/test_tf.py
|
AkashNarayanan/pennylane-sf
|
f2dbd0c2d7346effcbd0b82793f4dd7fc6de2a8f
|
[
"Apache-2.0"
] | 30
|
2018-11-30T20:11:06.000Z
|
2020-07-08T20:39:21.000Z
|
tests/test_tf.py
|
AkashNarayanan/pennylane-sf
|
f2dbd0c2d7346effcbd0b82793f4dd7fc6de2a8f
|
[
"Apache-2.0"
] | 3
|
2019-03-15T03:34:06.000Z
|
2020-01-07T12:26:18.000Z
|
# Copyright 2018-2020 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit tests for the TF plugin.
"""
import numpy as np
import pytest
tf = pytest.importorskip("tensorflow", minversion="2")
import strawberryfields as sf
import pennylane as qml
from pennylane.wires import Wires
from scipy.special import factorial as fac
psi = np.array(
[
0.08820314 + 0.14909648j,
0.32826940 + 0.32956027j,
0.26695166 + 0.19138087j,
0.32419593 + 0.08460371j,
0.02984712 + 0.30655538j,
0.03815006 + 0.18297214j,
0.17330397 + 0.2494433j,
0.14293477 + 0.25095202j,
0.21021125 + 0.30082734j,
0.23443833 + 0.19584968j,
]
)
one_mode_single_real_parameter_gates = [
("ThermalState", qml.ThermalState),
("Kerr", qml.Kerr),
("QuadraticPhase", qml.QuadraticPhase),
("Rotation", qml.Rotation),
("CubicPhase", qml.CubicPhase),
]
two_modes_single_real_parameter_gates = [
("CrossKerr", qml.CrossKerr),
("ControlledAddition", qml.ControlledAddition),
("ControlledPhase", qml.ControlledPhase),
]
# compare to reference SF engine
def SF_gate_reference(sf_op, cutoff_dim, wires, *args):
"""SF reference circuit for gate tests"""
eng = sf.Engine("fock", backend_options={"cutoff_dim": cutoff_dim})
prog = sf.Program(2)
with prog.context as q:
sf.ops.S2gate(0.1) | q
sf_op(*args) | [q[i] for i in wires]
state = eng.run(prog).state
return state.mean_photon(0)[0], state.mean_photon(1)[0]
# compare to reference SF engine
def SF_expectation_reference(sf_expectation, cutoff_dim, wires, *args):
"""SF reference circuit for expectation tests"""
eng = sf.Engine("fock", backend_options={"cutoff_dim": cutoff_dim})
prog = sf.Program(2)
with prog.context as q:
sf.ops.Dgate(0.1) | q[0]
sf.ops.S2gate(0.1) | q
state = eng.run(prog).state
return sf_expectation(state, Wires(wires), args)[0]
class TestTF:
"""Test the TF simulator."""
def test_load_tf_device(self):
"""Test that the tf plugin loads correctly"""
dev = qml.device("strawberryfields.tf", wires=2, cutoff_dim=5)
assert dev.num_wires == 2
assert dev.cutoff == 5
assert dev.hbar == 2
assert dev.shots is None
assert dev.short_name == "strawberryfields.tf"
def test_tf_args(self):
"""Test that the tf plugin requires correct arguments"""
with pytest.raises(TypeError, match="missing 1 required positional argument: 'wires'"):
dev = qml.device("strawberryfields.tf")
with pytest.raises(
TypeError, match="missing 1 required keyword-only argument: 'cutoff_dim'"
):
dev = qml.device("strawberryfields.tf", wires=1)
def test_tf_circuit(self, tol):
"""Test that the tf plugin provides correct result for simple circuit"""
dev = qml.device("strawberryfields.tf", wires=1, cutoff_dim=10)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(x):
qml.Displacement(x, 0., wires=0)
return qml.expval(qml.NumberOperator(0))
res = circuit(1.)
assert isinstance(res, tf.Tensor)
assert np.allclose(res, 1, atol=tol, rtol=0)
class TestGates:
"""Tests the supported gates compared to the result from Strawberry
Fields"""
@pytest.mark.parametrize("gate_name,pennylane_gate", one_mode_single_real_parameter_gates)
def test_one_mode_single_real_parameter_gates(self, gate_name, pennylane_gate, tol):
"""Test that gates that take a single real parameter and acts on one
mode provide the correct result"""
a = 0.312
operation = pennylane_gate
wires = [0]
cutoff_dim = 10
dev = qml.device("strawberryfields.tf", wires=2, cutoff_dim=cutoff_dim)
sf_operation = dev._operation_map[gate_name]
assert dev.supports_operation(gate_name)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(*args):
qml.TwoModeSqueezing(0.1, 0, wires=[0, 1])
operation(*args, wires=wires)
return qml.expval(qml.NumberOperator(0)), qml.expval(qml.NumberOperator(1))
res = circuit(a)
sf_res = SF_gate_reference(sf_operation, cutoff_dim, wires, a)
assert np.allclose(res, sf_res, atol=tol, rtol=0)
@pytest.mark.parametrize("gate_name,pennylane_gate", two_modes_single_real_parameter_gates)
def test_two_modes_single_real_parameter_gates(self, gate_name, pennylane_gate, tol):
"""Test that gates that take a single real parameter and acts on two
modes provide the correct result"""
a = 0.312
operation = pennylane_gate
wires = [0, 1]
cutoff_dim = 10
dev = qml.device("strawberryfields.tf", wires=2, cutoff_dim=cutoff_dim)
sf_operation = dev._operation_map[gate_name]
assert dev.supports_operation(gate_name)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(*args):
qml.TwoModeSqueezing(0.1, 0, wires=[0, 1])
operation(*args, wires=wires)
return qml.expval(qml.NumberOperator(0)), qml.expval(qml.NumberOperator(1))
res = circuit(a)
sf_res = SF_gate_reference(sf_operation, cutoff_dim, wires, a)
assert np.allclose(res, sf_res, atol=tol, rtol=0)
def test_gaussian_state(self, tol):
"""Test that the GaussianState gate works correctly"""
V = np.array([[0.5, 0], [0, 2]])
r = np.array([0, 0])
wires = [0]
gate_name = "GaussianState"
operation = qml.GaussianState
cutoff_dim = 10
dev = qml.device("strawberryfields.tf", wires=2, cutoff_dim=cutoff_dim)
sf_operation = dev._operation_map[gate_name]
assert dev.supports_operation(gate_name)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(*args):
qml.TwoModeSqueezing(0.1, 0, wires=[0, 1])
operation(*args, wires=wires)
return qml.expval(qml.NumberOperator(0)), qml.expval(qml.NumberOperator(1))
res = circuit(V, r)
sf_res = SF_gate_reference(sf_operation, cutoff_dim, wires, V, r)
assert np.allclose(res, sf_res, atol=tol, rtol=0)
def test_interferometer(self, tol):
"""Test that the Interferometer gate works correctly"""
U = np.array(
[
[0.83645892 - 0.40533293j, -0.20215326 + 0.30850569j],
[-0.23889780 - 0.28101519j, -0.88031770 - 0.29832709j],
]
)
wires = [0, 1]
gate_name = "InterferometerUnitary"
operation = qml.InterferometerUnitary
cutoff_dim = 10
dev = qml.device("strawberryfields.tf", wires=2, cutoff_dim=cutoff_dim)
sf_operation = dev._operation_map[gate_name]
assert dev.supports_operation(gate_name)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(*args):
qml.TwoModeSqueezing(0.1, 0, wires=[0, 1])
operation(*args, wires=wires)
return qml.expval(qml.NumberOperator(0)), qml.expval(qml.NumberOperator(1))
res = circuit(U)
sf_res = SF_gate_reference(sf_operation, cutoff_dim, wires, U)
assert np.allclose(res, sf_res, atol=tol, rtol=0)
def test_displaced_squeezed_state(self, tol):
"""Test that the DisplacedSqueezedState gate works correctly"""
a = 0.312
b = 0.123
c = 0.532
d = 0.124
wires = [0]
gate_name = "DisplacedSqueezedState"
operation = qml.DisplacedSqueezedState
cutoff_dim = 10
dev = qml.device("strawberryfields.tf", wires=2, cutoff_dim=cutoff_dim)
sf_operation = dev._operation_map[gate_name]
assert dev.supports_operation(gate_name)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(*args):
qml.TwoModeSqueezing(0.1, 0, wires=[0, 1])
operation(*args, wires=wires)
return qml.expval(qml.NumberOperator(0)), qml.expval(qml.NumberOperator(1))
res = circuit(a, b, c, d)
sf_res = SF_gate_reference(sf_operation, cutoff_dim, wires, a, b, c, d)
assert np.allclose(res, sf_res, atol=tol, rtol=0)
def test_tf_state(self, tol):
"""Test that the FockState gate works correctly"""
arg = 1
wires = [0]
gate_name = "FockState"
operation = qml.FockState
cutoff_dim = 10
dev = qml.device("strawberryfields.tf", wires=2, cutoff_dim=cutoff_dim)
sf_operation = dev._operation_map[gate_name]
assert dev.supports_operation(gate_name)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(*args):
qml.TwoModeSqueezing(0.1, 0, wires=[0, 1])
operation(*args, wires=wires)
return qml.expval(qml.NumberOperator(0)), qml.expval(qml.NumberOperator(1))
res = circuit(arg)
sf_res = SF_gate_reference(sf_operation, cutoff_dim, wires, arg)
assert np.allclose(res, sf_res, atol=tol, rtol=0)
def test_tf_state_vector(self, tol):
"""Test that the FockStateVector gate works correctly"""
args = psi
wires = [0]
gate_name = "FockStateVector"
operation = qml.FockStateVector
cutoff_dim = 10
dev = qml.device("strawberryfields.tf", wires=2, cutoff_dim=cutoff_dim)
sf_operation = dev._operation_map[gate_name]
assert dev.supports_operation(gate_name)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(*args):
qml.TwoModeSqueezing(0.1, 0, wires=[0, 1])
operation(*args, wires=wires)
return qml.expval(qml.NumberOperator(0)), qml.expval(qml.NumberOperator(1))
res = circuit(psi)
sf_res = SF_gate_reference(sf_operation, cutoff_dim, wires, psi)
assert np.allclose(res, sf_res, atol=tol, rtol=0)
def test_tf_density_matrix(self, tol):
"""Test that the FockDensityMatrix gate works correctly"""
dm = np.outer(psi, psi.conj())
wires = [0]
gate_name = "FockDensityMatrix"
operation = qml.FockDensityMatrix
cutoff_dim = 10
dev = qml.device("strawberryfields.tf", wires=2, cutoff_dim=cutoff_dim)
sf_operation = dev._operation_map[gate_name]
assert dev.supports_operation(gate_name)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(*args):
qml.TwoModeSqueezing(0.1, 0, wires=[0, 1])
operation(*args, wires=wires)
return qml.expval(qml.NumberOperator(0)), qml.expval(qml.NumberOperator(1))
res = circuit(dm)
sf_res = SF_gate_reference(sf_operation, cutoff_dim, wires, dm)
assert np.allclose(res, sf_res, atol=tol, rtol=0)
class TestExpectation:
"""Test that all supported expectations work as expected when compared to
the Strawberry Fields results"""
def test_number_operator(self, tol):
"""Test that the expectation value of the NumberOperator observable
yields the correct result"""
cutoff_dim = 10
dev = qml.device("strawberryfields.tf", wires=2, cutoff_dim=cutoff_dim)
gate_name = "NumberOperator"
assert dev.supports_observable(gate_name)
op = qml.NumberOperator
sf_expectation = dev._observable_map[gate_name]
wires = [0]
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(*args):
qml.Displacement(0.1, 0, wires=0)
qml.TwoModeSqueezing(0.1, 0, wires=[0, 1])
return qml.expval(op(*args, wires=wires))
assert np.allclose(
circuit(), SF_expectation_reference(sf_expectation, cutoff_dim, wires), atol=tol, rtol=0
)
def test_tensor_number_operator(self, tol):
"""Test that the expectation value of the TensorN observable
yields the correct result"""
cutoff_dim = 10
dev = qml.device("strawberryfields.tf", wires=2, cutoff_dim=cutoff_dim)
gate_name = "TensorN"
assert dev.supports_observable(gate_name)
op = qml.TensorN
sf_expectation = dev._observable_map[gate_name]
wires = [0, 1]
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit():
qml.Displacement(0.1, 0, wires=0)
qml.TwoModeSqueezing(0.1, 0, wires=[0, 1])
return qml.expval(op(wires=wires))
expval = circuit()
assert np.allclose(
expval, SF_expectation_reference(sf_expectation, cutoff_dim, wires), atol=tol, rtol=0
)
@pytest.mark.parametrize("gate_name,op", [("X", qml.X), ("P", qml.P)])
def test_quadrature(self, gate_name, op, tol):
"""Test that the expectation of the X and P quadrature operators yield
the correct result"""
cutoff_dim = 10
dev = qml.device("strawberryfields.tf", wires=2, cutoff_dim=cutoff_dim)
assert dev.supports_observable(gate_name)
sf_expectation = dev._observable_map[gate_name]
wires = [0]
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(*args):
qml.Displacement(0.1, 0, wires=0)
qml.TwoModeSqueezing(0.1, 0, wires=[0, 1])
return qml.expval(op(*args, wires=wires))
assert np.allclose(
circuit(), SF_expectation_reference(sf_expectation, cutoff_dim, wires), atol=tol, rtol=0
)
def test_quad_operator(self, tol):
"""Test that the expectation for the generalized quadrature observable
yields the correct result"""
cutoff_dim = 10
a = 0.312
dev = qml.device("strawberryfields.tf", wires=2, cutoff_dim=cutoff_dim)
op = qml.QuadOperator
gate_name = "QuadOperator"
assert dev.supports_observable(gate_name)
sf_expectation = dev._observable_map[gate_name]
wires = [0]
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(*args):
qml.Displacement(0.1, 0, wires=0)
qml.TwoModeSqueezing(0.1, 0, wires=[0, 1])
return qml.expval(op(*args, wires=wires))
assert np.allclose(
circuit(a),
SF_expectation_reference(sf_expectation, cutoff_dim, wires, a),
atol=tol,
rtol=0,
)
def test_polyxp(self, tol):
"""Test that PolyXP works as expected"""
cutoff_dim = 12
a = 0.14321
nbar = 0.2234
hbar = 2
dev = qml.device("strawberryfields.tf", wires=1, hbar=hbar, cutoff_dim=cutoff_dim)
Q = np.array([0, 1, 0]) # x expectation
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(x):
qml.Displacement(x, 0, wires=0)
return qml.expval(qml.PolyXP(Q, 0))
# test X expectation
assert np.allclose(circuit(a), hbar * a, atol=tol, rtol=0)
Q = np.diag([-0.5, 1 / (2 * hbar), 1 / (2 * hbar)]) # mean photon number
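# PolyXP observables are defined over the quadrature basis r = (I, x, p): a length-3
# vector q encodes the linear combination q . r, while a 3x3 matrix Q encodes the
# quadratic form r^T Q r. The matrix above gives (x**2 + p**2) / (2 * hbar) - 1/2,
# i.e. the mean photon number.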
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(x):
qml.ThermalState(nbar, wires=0)
qml.Displacement(x, 0, wires=0)
return qml.expval(qml.PolyXP(Q, 0))
# test mean photon number expectation
assert np.allclose(circuit(a), nbar + np.abs(a) ** 2, atol=tol, rtol=0)
def test_fock_state_projector(self, tol):
"""Test that FockStateProjector works as expected"""
cutoff_dim = 12
a = 0.54321
r = 0.123
hbar = 2
dev = qml.device("strawberryfields.tf", wires=2, hbar=hbar, cutoff_dim=cutoff_dim)
# test correct number state expectation |<n|a>|^2
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(x):
qml.Displacement(x, 0, wires=0)
return qml.expval(qml.FockStateProjector(np.array([2]), wires=0))
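# Coherent-state amplitude: <n|alpha> = exp(-|alpha|**2 / 2) * alpha**n / sqrt(n!),
# so the expected projector value below is |<2|alpha>|**2 with alpha = a (phi = 0).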
expected = np.abs(np.exp(-np.abs(a) ** 2 / 2) * a ** 2 / np.sqrt(2)) ** 2
assert np.allclose(circuit(a), expected, atol=tol, rtol=0)
# test correct number state expectation |<n|S(r)>|^2
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(x):
qml.Squeezing(x, 0, wires=0)
return qml.expval(qml.FockStateProjector(np.array([2, 0]), wires=[0, 1]))
expected = np.abs(np.sqrt(2) / (2) * (-np.tanh(r)) / np.sqrt(np.cosh(r))) ** 2
assert np.allclose(circuit(r), expected, atol=tol, rtol=0)
def test_trace(self, tol):
"""Test that Identity expectation works as expected"""
cutoff_dim = 5
r1 = 0.5
r2 = 0.7
hbar = 2
dev = qml.device("strawberryfields.tf", wires=2, hbar=hbar, cutoff_dim=cutoff_dim)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(x, y):
qml.Squeezing(x, 0, wires=0)
qml.Squeezing(y, 0, wires=1)
return qml.expval(qml.Identity(wires=[0])), qml.expval(qml.Identity(wires=[1]))
# reference SF circuit
def SF_gate_reference_trace(x, y):
"""SF reference circuit"""
eng = sf.Engine("tf", backend_options={"cutoff_dim": cutoff_dim})
prog = sf.Program(2)
with prog.context as q:
sf.ops.Sgate(x) | q[0]
sf.ops.Sgate(y) | q[1]
state = eng.run(prog).state
return state.trace()
# test trace < 1 for high squeezing
expected = SF_gate_reference_trace(r1, r2)
assert np.allclose(circuit(r1, r2), expected, atol=tol, rtol=0)
def test_trace_subsystems(self, tol):
"""Test that Identity expectation is one on a subset of wires"""
cutoff_dim = 15
r1 = 0.01
r2 = 0.04
hbar = 2
dev = qml.device("strawberryfields.tf", wires=2, hbar=hbar, cutoff_dim=cutoff_dim)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(x, y):
qml.Squeezing(x, 0, wires=0)
qml.Squeezing(y, 0, wires=1)
return qml.expval(qml.Identity(wires=[0]))
assert np.allclose(circuit(r1, r2), 1, atol=tol, rtol=0)
class TestVariance:
"""Test for the device variance"""
def test_first_order_cv(self, tol):
"""Test variance of a first order CV expectation value"""
dev = qml.device("strawberryfields.tf", wires=1, cutoff_dim=15)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(r, phi):
qml.Squeezing(r, 0, wires=0)
qml.Rotation(phi, wires=0)
return qml.var(qml.X(0))
r = tf.Variable(0.105)
phi = tf.Variable(-0.654)
with tf.GradientTape() as tape:
var = circuit(r, phi)
expected = (
tf.math.exp(2 * r) * tf.math.sin(phi) ** 2 + tf.math.exp(-2 * r) * tf.math.cos(phi) ** 2
)
assert np.allclose(var, expected, atol=tol, rtol=0)
def test_second_order_cv(self, tol):
"""Test variance of a second order CV expectation value"""
dev = qml.device("strawberryfields.tf", wires=1, cutoff_dim=15)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(n, a):
qml.ThermalState(n, wires=0)
qml.Displacement(a, 0, wires=0)
return qml.var(qml.NumberOperator(0))
n = tf.Variable(0.12)
a = tf.Variable(0.105)
with tf.GradientTape() as tape:
var = circuit(n, a)
expected = n ** 2 + n + np.abs(a) ** 2 * (1 + 2 * n)
assert np.allclose(var, expected, atol=tol, rtol=0)
def test_polyxp_variance(self, tol):
"""Tests that variance for PolyXP measurement works"""
dev = qml.device("strawberryfields.tf", wires=1, cutoff_dim=15)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(r, phi):
qml.Squeezing(r, 0, wires=0)
qml.Rotation(phi, wires=0)
return qml.var(qml.PolyXP(np.array([0, 1, 0]), wires=0))
r = 0.105
phi = -0.654
var = circuit(r, phi)
expected = np.exp(2 * r) * np.sin(phi) ** 2 + np.exp(-2 * r) * np.cos(phi) ** 2
assert np.allclose(var, expected, atol=tol, rtol=0)
class TestProbability:
"""Integration tests for returning probabilities"""
def test_single_mode_probability(self, tol):
"""Test that a coherent state returns the correct probability"""
cutoff = 10
dev = qml.device("strawberryfields.tf", wires=1, cutoff_dim=cutoff)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(a, phi):
qml.Displacement(a, phi, wires=0)
return qml.probs(wires=0)
a = 0.4
phi = -0.12
alpha = a * np.exp(1j * phi)
n = np.arange(cutoff)
ref_probs = np.abs(np.exp(-0.5 * np.abs(alpha) ** 2) * alpha ** n / np.sqrt(fac(n))) ** 2
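# These are the Poisson photon-number probabilities of a coherent state:
# P(n) = exp(-|alpha|**2) * |alpha|**(2*n) / n!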
res = circuit(a, phi)
assert np.allclose(res, ref_probs, atol=tol, rtol=0)
def test_multi_mode_probability(self, tol):
"""Test that a product of coherent states returns the correct probability"""
cutoff = 10
dev = qml.device("strawberryfields.tf", wires=2, cutoff_dim=cutoff)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(a, phi):
qml.Displacement(a, phi, wires=0)
qml.Displacement(a, phi, wires=1)
return qml.probs(wires=[0, 1])
a = 0.4
phi = -0.12
alpha = a * np.exp(1j * phi)
n = np.arange(cutoff)
ref_probs = np.abs(np.exp(-0.5 * np.abs(alpha) ** 2) * alpha ** n / np.sqrt(fac(n))) ** 2
ref_probs = np.kron(ref_probs, ref_probs)
res = circuit(a, phi)
assert np.allclose(res, ref_probs, atol=tol, rtol=0)
def test_marginal_probability(self, tol):
"""Test that a coherent state marginal probability is correct"""
cutoff = 10
dev = qml.device("strawberryfields.tf", wires=2, cutoff_dim=cutoff)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(a, phi):
qml.Displacement(a, phi, wires=1)
return qml.probs(wires=1)
a = 0.4
phi = -0.12
alpha = a * np.exp(1j * phi)
n = np.arange(cutoff)
ref_probs = np.abs(np.exp(-0.5 * np.abs(alpha) ** 2) * alpha ** n / np.sqrt(fac(n))) ** 2
res = circuit(a, phi)
assert np.allclose(res, ref_probs, atol=tol, rtol=0)
class TestPassthruGradients:
"""Test various gradients working correctly with the backprop method"""
def test_gradient_coherent(self, tol):
"""Test that the jacobian of the probability for a coherent states is
approximated well with finite differences"""
cutoff = 10
dev = qml.device("strawberryfields.tf", wires=1, cutoff_dim=cutoff)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(a, phi):
qml.Displacement(a, phi, wires=0)
return qml.probs(wires=[0])
a = tf.Variable(0.4)
phi = tf.Variable(-0.12)
n = np.arange(cutoff)
with tf.GradientTape(persistent=True) as tape:
res = circuit(a, phi)
# differentiate with respect to parameter a
grad = tape.jacobian(res, a)
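# Analytic derivative of the Poisson distribution P(n) = exp(-a**2) * a**(2*n) / n!
# with respect to a: dP/da = 2 * exp(-a**2) * a**(2*n - 1) * (n - a**2) / n!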
expected_gradient = 2 * tf.math.exp(-(a ** 2)) * a ** (2 * n - 1) * (n - a ** 2) / fac(n)
assert np.allclose(grad, expected_gradient, atol=tol, rtol=0)
# differentiate with respect to parameter phi
grad = tape.jacobian(res, phi)
expected_gradient = 0
assert np.allclose(grad, expected_gradient, atol=tol, rtol=0)
def test_gradient_squeezed(self, tol):
"""Test that the jacobian of the probability for a squeezed states is
approximated well with finite differences"""
cutoff = 5
dev = qml.device("strawberryfields.tf", wires=1, cutoff_dim=cutoff)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(r, phi):
qml.Squeezing(r, phi, wires=0)
return qml.probs(wires=[0])
r = tf.Variable(0.4)
phi = tf.Variable(-0.12)
n = np.arange(cutoff)
with tf.GradientTape(persistent=True) as tape:
res = circuit(r, phi)
# differentiate with respect to parameter r
grad = tape.jacobian(res, r)
assert grad.shape == (cutoff,)
expected_gradient = (
np.abs(tf.math.tanh(r)) ** n
* (1 + 2 * n - tf.math.cosh(2 * r))
* fac(n)
/ (2 ** (n + 1) * tf.math.cosh(r) ** 2 * tf.math.sinh(r) * fac(n / 2) ** 2)
)
expected_gradient = expected_gradient.numpy()
expected_gradient[n % 2 != 0] = 0
assert np.allclose(grad, expected_gradient, atol=tol, rtol=0)
# differentiate with respect to parameter phi
grad = tape.jacobian(res, phi)
expected_gradient = 0
assert np.allclose(grad, expected_gradient, atol=tol, rtol=0)
def test_gradient_coherent_two_wires(self, tol):
"""Test that the jacobian of the probability for a coherent states on
two wires is approximated well with finite differences"""
cutoff = 4
dev = qml.device("strawberryfields.tf", wires=2, cutoff_dim=cutoff)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(a, phi):
qml.Displacement(a, phi, wires=0)
qml.Displacement(a, phi, wires=1)
return qml.probs(wires=[0, 1])
a = tf.Variable(0.4)
phi = tf.Variable(-0.12)
with tf.GradientTape(persistent=True) as tape:
res = circuit(a, phi)
c = np.arange(cutoff)
d = np.arange(cutoff)
n0, n1 = np.meshgrid(c, d)
n0 = n0.flatten()
n1 = n1.flatten()
# differentiate with respect to parameter a
grad = tape.jacobian(res, a)
expected_gradient = (
2
* (a ** (-1 + 2 * n0 + 2 * n1))
* tf.math.exp(-2 * a ** 2)
* (-2 * a ** 2 + n0 + n1)
/ (fac(n0) * fac(n1))
)
assert np.allclose(grad, expected_gradient, atol=tol, rtol=0)
# differentiate with respect to parameter phi
grad = tape.jacobian(res, phi)
expected_gradient = 0
assert np.allclose(grad, expected_gradient, atol=tol, rtol=0)
def test_gradient_displaced_thermal_var_photon(self, tol):
"""Test gradient of the photon variance of a displaced thermal state"""
dev = qml.device("strawberryfields.tf", wires=1, cutoff_dim=15)
op = qml.TensorN(wires=[0])
# Check that instantiating TensorN on one mode returns the
# NumberOperator
assert isinstance(op, qml.NumberOperator)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(n, a):
qml.ThermalState(n, wires=0)
qml.Displacement(a, 0, wires=0)
return qml.var(op)
n = tf.Variable(0.12)
a = tf.Variable(0.105)
with tf.GradientTape(persistent=True) as tape:
var = circuit(n, a)
# circuit jacobians
grad = tape.gradient(var, [n, a])
expected = np.array([2 * a ** 2 + 2 * n + 1, 2 * a * (2 * n + 1)])
assert np.allclose(grad, expected, atol=tol, rtol=0)
def test_gradient_squeezed_var_photon(self, tol):
"""Test gradient of the photon variance of a squeezed state"""
dev = qml.device("strawberryfields.tf", wires=1, cutoff_dim=15)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(r, phi):
qml.Squeezing(r, 0, wires=0)
qml.Rotation(phi, wires=0)
return qml.var(qml.X(0))
r = tf.Variable(0.105)
phi = tf.Variable(-0.654)
with tf.GradientTape() as tape:
var = circuit(r, phi)
# circuit jacobians
grad = tape.gradient(var, [r, phi])
expected = np.array(
[
2 * tf.math.exp(2 * r) * tf.math.sin(phi) ** 2
- 2 * tf.math.exp(-2 * r) * tf.math.cos(phi) ** 2,
2 * tf.math.sinh(2 * r) * tf.math.sin(2 * phi),
]
)
assert np.allclose(grad, expected, atol=tol, rtol=0)
def test_gradient_second_order_cv(self, tol):
"""Test variance of a second order CV variance"""
dev = qml.device("strawberryfields.tf", wires=1, cutoff_dim=15)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(n, a):
qml.ThermalState(n, wires=0)
qml.Displacement(a, 0, wires=0)
return qml.var(qml.NumberOperator(0))
n = tf.Variable(0.12)
a = tf.Variable(0.105)
with tf.GradientTape() as tape:
var = circuit(n, a)
expected = n ** 2 + n + np.abs(a) ** 2 * (1 + 2 * n)
assert np.allclose(var, expected, atol=tol, rtol=0)
# circuit jacobians
grad = tape.gradient(var, [n, a])
expected = np.array([2 * a ** 2 + 2 * n + 1, 2 * a * (2 * n + 1)])
assert np.allclose(grad, expected, atol=tol, rtol=0)
def test_coherent_ket_gradient(self, tol):
"""Test whether the gradient of the third element (|2>) of the coherent
state vector is correct."""
dev = qml.device("strawberryfields.tf", wires=1, cutoff_dim=15)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(a):
qml.Displacement(a, 0, wires=0)
return qml.expval(qml.Identity(0))
a = tf.Variable(0.23)
with tf.GradientTape() as tape:
circuit(a)
res = tf.cast(dev.state.ket()[2], dtype=tf.float64)
res_ex = np.exp(-0.5 * a ** 2) * a ** 2 / np.sqrt(2)
assert np.allclose(res, res_ex, atol=tol, rtol=0)
grad = tape.gradient(res, [a])
grad_ex = -a * (a ** 2 - 2) * np.exp(-(a ** 2) / 2) / np.sqrt(2)
assert np.allclose(grad, grad_ex, atol=tol, rtol=0)
def test_coherent_dm_gradient(self, tol):
"""Test whether the gradient of the 3, 3 element of the coherent
density matrix is correct."""
dev = qml.device("strawberryfields.tf", wires=1, cutoff_dim=15)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(a):
qml.Displacement(a, 0, wires=0)
return qml.expval(qml.Identity(0))
a = tf.Variable(0.23)
with tf.GradientTape() as tape:
circuit(a)
res = tf.cast(dev.state.dm()[2, 2], dtype=tf.float64)
res_ex = (np.exp(-0.5 * a ** 2) * a ** 2 / np.sqrt(2)) ** 2
assert np.allclose(res, res_ex, atol=tol, rtol=0)
grad = tape.gradient(res, [a])
grad_ex = -(a ** 3) * (a ** 2 - 2) * np.exp(-(a ** 2))
assert np.allclose(grad, grad_ex, atol=tol, rtol=0)
def test_2mode_squeezed_vacuum_gradients(self, tol):
"""Tests whether the gradient for the probability of the states |0,0> and |1,1>
created by an S2gate is correct."""
cutoff = 15
dev = qml.device("strawberryfields.tf", wires=2, cutoff_dim=cutoff)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(r, phi, input_state, output_state):
qml.FockStateVector(input_state, wires=[0, 1])
qml.TwoModeSqueezing(r, phi, wires=[0, 1])
return qml.expval(qml.FockStateProjector(output_state, wires=[0, 1]))
R = 0.3
PHI = 0.2
r = tf.Variable(R)
phi = tf.Variable(PHI)
vacuum = np.zeros((cutoff, cutoff), dtype=np.complex64)
vacuum[0, 0] = 1.0 + 0.0j
vacuum = tf.constant(vacuum)
with tf.GradientTape(persistent=True) as tape:
prob00 = circuit(r, phi, vacuum, np.array([0, 0]))
prob11 = circuit(r, phi, vacuum, np.array([1, 1]))
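# For two-mode squeezed vacuum, P(0,0) = 1 / cosh(r)**2 and P(1,1) = tanh(r)**2 / cosh(r)**2;
# the gradients checked below are the derivatives of these expressions with respect to r.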
r_grad, phi_grad = tape.gradient(prob00, [r, phi])
assert np.allclose(r_grad, -2 * np.tanh(R) / np.cosh(R) ** 2, atol=tol, rtol=0)
assert np.allclose(phi_grad, 0.0, atol=tol, rtol=0)
r_grad, phi_grad = tape.gradient(prob11, [r, phi])
assert np.allclose(
r_grad, 2 * (np.sinh(R) - np.sinh(R) ** 3) / np.cosh(R) ** 5, atol=tol, rtol=0
)
assert np.allclose(phi_grad, 0.0, atol=tol, rtol=0)
class TestHighLevelIntegration:
"""Tests for integration with higher level components of PennyLane."""
def test_template_integration(self, tol):
"""Test that a PassthruQNode strawberryfields.tf works with templates."""
dev = qml.device("strawberryfields.tf", wires=3, cutoff_dim=5)
@qml.qnode(dev, interface="tf", diff_method="backprop")
def circuit(weights):
for i in range(3):
qml.Squeezing(0.1, 0, wires=i)
# NOTE: The interferometer template iterates over the weights.
# Unfortunately, tf.Variable objects are not iterable; multiplying them
# by '1.0' converts them into tf.Tensor objects, which *are* iterable.
qml.templates.Interferometer(
weights[0] * 1.0,
weights[1] * 1.0,
weights[2] * 1.0,
wires=[0, 1, 2],
mesh="rectangular",
)
return qml.probs(wires=0)
theta = np.random.uniform(low=0, high=3, size=(3,))
phi = np.random.uniform(low=0, high=2, size=(3,))
varphi = np.random.uniform(low=0, high=2, size=(3,))
weights = [theta, phi, varphi]
weights = [tf.Variable(w) for w in weights]
with tf.GradientTape() as tape:
res = circuit(weights)
grad = tape.gradient(res, weights)
assert isinstance(grad, list)
assert len(grad) == 3
assert all(isinstance(g, tf.Tensor) for g in grad)
assert all(g.shape == w.shape for g, w in zip(grad, weights))
def test_qnode_collection_integration(self, tol):
"""Test that a PassthruQNode strawberryfields.tf works with QNodeCollections."""
cutoff = 15
dev = qml.device("strawberryfields.tf", wires=2, cutoff_dim=cutoff)
def circuit(weights, input_state=None, **kwargs):
qml.FockStateVector(input_state, wires=[0, 1])
qml.TwoModeSqueezing(weights[0], weights[1], wires=[0, 1])
obs_list = [
qml.FockStateProjector(np.array([0, 0]), wires=[0, 1]),
qml.FockStateProjector(np.array([1, 1]), wires=[0, 1]),
]
qnodes = qml.map(circuit, obs_list, dev, interface="tf")
assert qnodes.interface == "tf"
weights = tf.Variable([0.12, -0.543])
def cost(weights):
vacuum = np.zeros((cutoff, cutoff), dtype=np.complex64)
vacuum[0, 0] = 1.0 + 0.0j
vacuum = tf.constant(vacuum)
return tf.reduce_sum(qnodes(weights, input_state=vacuum))
with tf.GradientTape() as tape:
res = cost(weights)
grad = tape.gradient(res, weights)
assert isinstance(grad, tf.Tensor)
assert grad.shape == weights.shape
R = weights[0]
expected_grad = [
-2 * tf.math.tanh(R) / tf.math.cosh(R) ** 2
+ 2 * (tf.math.sinh(R) - tf.math.sinh(R) ** 3) / tf.math.cosh(R) ** 5,
0,
]
assert np.allclose(grad, expected_grad, atol=tol, rtol=0)
| 36.062802
| 101
| 0.579156
|
e3b990ff98d25a57bb06931932cccb083cae5304
| 1,498
|
py
|
Python
|
sdk/core/azure-core/azure/core/common.py
|
kushan2018/azure-sdk-for-python
|
08a9296207281f4e90e23cf7a30173863accc867
|
[
"MIT"
] | null | null | null |
sdk/core/azure-core/azure/core/common.py
|
kushan2018/azure-sdk-for-python
|
08a9296207281f4e90e23cf7a30173863accc867
|
[
"MIT"
] | 1
|
2019-06-04T18:12:16.000Z
|
2019-06-04T18:12:16.000Z
|
sdk/core/azure-core/azure/core/common.py
|
kushan2018/azure-sdk-for-python
|
08a9296207281f4e90e23cf7a30173863accc867
|
[
"MIT"
] | 1
|
2019-06-17T22:18:23.000Z
|
2019-06-17T22:18:23.000Z
|
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the ""Software""), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# --------------------------------------------------------------------------
from typing import Union, Optional
from azure.core.exceptions import (
ServiceRequestError,
ServiceResponseError
)
ErrorType = Optional[Union[ServiceRequestError, ServiceResponseError]]
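# Illustrative use of the alias (not part of the original module): a pipeline hook
# could be annotated as `def on_attempt_failed(error: ErrorType) -> None`, where
# `None` means no request/response error occurred.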
| 45.393939
| 78
| 0.698932
|
f4317ed285dfbc7f14f2f9ebe37da9c022c9230c
| 5,779
|
py
|
Python
|
engine/classes.py
|
underground-lab/zelezobeton
|
5afd96c3123b3d8accca107c6e60a3f6fdecdf27
|
[
"MIT"
] | 2
|
2022-01-10T11:46:46.000Z
|
2022-01-11T08:32:54.000Z
|
engine/classes.py
|
underground-lab/zelezobeton
|
5afd96c3123b3d8accca107c6e60a3f6fdecdf27
|
[
"MIT"
] | 1
|
2022-01-13T09:16:25.000Z
|
2022-01-14T08:39:12.000Z
|
engine/classes.py
|
underground-lab/zelezobeton
|
5afd96c3123b3d8accca107c6e60a3f6fdecdf27
|
[
"MIT"
] | null | null | null |
from copy import deepcopy
from dataclasses import dataclass, field
from typing import Optional
@dataclass
class Room:
description: str
exits: dict = field(default_factory=dict)
class Object:
def __init__(self, name, location='undiscovered', actions=None, **kwargs):
self.name = name
self.location = location
self.actions = actions or {}
# set additional instance variables
vars(self).update(kwargs)
def __eq__(self, other):
return type(self) is type(other) and vars(self) == vars(other)
@dataclass
class Action:
condition: list = field(default_factory=list)
impact: list = field(default_factory=list)
message: Optional[str] = None
class Game:
message_ok = 'OK'
def __init__(self, rooms, objects, current_room_key='start'):
self.rooms = self._rooms_from_data(rooms)
self.objects = self._objects_from_data(objects)
self.current_room_key = current_room_key
def _rooms_from_data(self, data):
return {
key: self._ensure_class(item, Room)
for key, item in deepcopy(data).items()
}
def _objects_from_data(self, data):
result = {}
for key, item in deepcopy(data).items():
obj = self._ensure_class(item, Object)
# replace action specs with Action instances
obj.actions = {
key: [
self._ensure_class(item, Action)
for item in self._ensure_list(action_specs)
]
for key, action_specs in obj.actions.items()
}
result[key] = obj
return result
@staticmethod
def _ensure_class(obj, cls):
if isinstance(obj, cls):
return obj
return cls(**obj)
@staticmethod
def _ensure_list(action_specs):
if isinstance(action_specs, list):
return action_specs
return [action_specs]
def process_command(self, command, *params):
if command in self.current_room.exits:
self.current_room_key = self.current_room.exits[command]
return self.message_ok
if not params:
raise InvalidCommand(command)
obj_key = params[0]
if obj_key in self.objects_with_action(command):
for action in self.objects[obj_key].actions[command]:
if self._conditions_met(action):
self._apply_impact(action)
return action.message or self.message_ok
raise InvalidCommand(command, obj_key)
def _conditions_met(self, action):
return all(
getattr(self, callback_name)(**kwargs)
for callback_name, kwargs in action.condition
)
def _apply_impact(self, action):
for callback_name, kwargs in action.impact:
getattr(self, callback_name)(**kwargs)
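# Example action spec (hypothetical data, for illustration only):
#   Action(condition=[('in_inventory', {'obj': 'key'})],
#          impact=[('open_exit', {'room': 'hall', 'direction': 'north', 'room_2': 'vault'})],
#          message='The door creaks open.')
# Each condition/impact entry names one of the Game callbacks below plus its keyword arguments.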
@property
def current_room(self):
return self.rooms[self.current_room_key]
@property
def objects_in_room(self):
return {
obj_key: obj for obj_key, obj in self.objects.items()
if obj.location == self.current_room_key
}
@property
def objects_in_inventory(self):
return {
obj_key: obj for obj_key, obj in self.objects.items()
if obj.location == 'inventory'
}
@property
def visible_objects(self):
return {**self.objects_in_room, **self.objects_in_inventory}
def objects_with_action(self, action_name):
return {
obj_key: obj for obj_key, obj in self.visible_objects.items()
if action_name in obj.actions
and any(self._conditions_met(action) for action in obj.actions[action_name])
}
def available_actions(self):
result = {}
for action_name in ('examine', 'take', 'open', 'use'):
objects = self.objects_with_action(action_name)
if objects:
result[action_name] = objects
return result
# callbacks that don't modify game state
def in_room(self, obj):
return self.objects[obj].location == self.current_room_key
def in_inventory(self, obj):
return self.objects[obj].location == 'inventory'
def is_visible(self, obj):
return self.in_room(obj) or self.in_inventory(obj)
def is_undiscovered(self, obj):
return self.objects[obj].location == 'undiscovered'
def is_gone(self, obj):
return self.objects[obj].location == 'gone'
def current_room_is(self, room):
return self.current_room_key == room
def exit_closed(self, room, direction):
return direction not in self.rooms[room].exits
def is_true(self, obj, attr):
return getattr(self.objects[obj], attr, None) is True
def not_true(self, obj, attr):
return not self.is_true(obj, attr)
# callbacks that modify game state
def move_to_room(self, obj, room):
self.objects[obj].location = room
def move_to_current_room(self, obj):
self.objects[obj].location = self.current_room_key
def move_to_inventory(self, obj):
self.objects[obj].location = 'inventory'
def remove_object(self, obj):
self.objects[obj].location = 'gone'
def open_exit(self, room, direction, room_2):
self.rooms[room].exits[direction] = room_2
def close_exit(self, room, direction):
exits = self.rooms[room].exits
if direction in exits:
del exits[direction]
def set_true(self, obj, attr):
setattr(self.objects[obj], attr, True)
def __eq__(self, other):
return type(self) is type(other) and vars(self) == vars(other)
class InvalidCommand(NotImplementedError):
pass
| 29.78866
| 88
| 0.625022
|
c13588ce53e7b272af716cd94f859f996d0c1c1e
| 711
|
py
|
Python
|
mars/services/storage/api/__init__.py
|
haijohn/mars
|
672b3a33a70565f01b1a3f508908445491d85acf
|
[
"Apache-2.0"
] | 1
|
2021-06-10T02:43:01.000Z
|
2021-06-10T02:43:01.000Z
|
mars/services/storage/api/__init__.py
|
JeffroMF/mars
|
2805241ac55b50c4f6319baa41113fbf8c723832
|
[
"Apache-2.0"
] | null | null | null |
mars/services/storage/api/__init__.py
|
JeffroMF/mars
|
2805241ac55b50c4f6319baa41113fbf8c723832
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 1999-2020 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .core import AbstractStorageAPI
from .oscar import StorageAPI, MockStorageAPI
from .web import WebStorageAPI
| 39.5
| 74
| 0.776371
|
6e54807d107b35f02d37c67d735409bd204e2a02
| 200
|
py
|
Python
|
bot/cogs/moderation.py
|
Spaxly/Starter-Dashboard
|
93d4039a441275c74da80c6942e622362a09bec7
|
[
"Apache-2.0"
] | 3
|
2021-05-28T06:46:41.000Z
|
2021-07-22T12:47:54.000Z
|
bot/cogs/moderation.py
|
Spaxly/Starter-Dashboard
|
93d4039a441275c74da80c6942e622362a09bec7
|
[
"Apache-2.0"
] | null | null | null |
bot/cogs/moderation.py
|
Spaxly/Starter-Dashboard
|
93d4039a441275c74da80c6942e622362a09bec7
|
[
"Apache-2.0"
] | 3
|
2021-05-12T14:35:54.000Z
|
2021-11-15T19:02:34.000Z
|
import discord
from discord.ext import commands
class Moderation(commands.Cog):
def __init__(self, client):
self.client = client
def setup(client):
client.add_cog(Moderation(client))
| 22.222222
| 38
| 0.735
|
92322af496a864ed5ebaaa1f80b1ce1bb0c04b4d
| 3,371
|
py
|
Python
|
lib/modules/situational_awareness/network/powerview/get_domain_trust.py
|
asmc/Empire-Win
|
af27f437ee919c9b65e3c2dd7c006f11ff5d5b2a
|
[
"BSD-3-Clause"
] | 3
|
2019-08-12T01:46:51.000Z
|
2020-09-12T12:00:16.000Z
|
lib/modules/situational_awareness/network/powerview/get_domain_trust.py
|
ninj4c0d3r/Empire
|
e07e7ca5398dce82f9ce10028355f909d102838b
|
[
"BSD-3-Clause"
] | null | null | null |
lib/modules/situational_awareness/network/powerview/get_domain_trust.py
|
ninj4c0d3r/Empire
|
e07e7ca5398dce82f9ce10028355f909d102838b
|
[
"BSD-3-Clause"
] | 5
|
2017-10-17T08:16:13.000Z
|
2021-04-30T02:41:02.000Z
|
from lib.common import helpers
class Module:
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'Get-NetDomainTrust',
'Author': ['@harmj0y'],
'Description': ('Return all domain trusts for the current domain or '
'a specified domain. Part of PowerView.'),
'Background' : True,
'OutputExtension' : None,
'NeedsAdmin' : False,
'OpsecSafe' : True,
'MinPSVersion' : '2',
'Comments': [
'https://github.com/PowerShellEmpire/PowerTools/tree/master/PowerView'
]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
'Description' : 'Agent to run module on.',
'Required' : True,
'Value' : ''
},
'Domain' : {
'Description' : 'The domain whose trusts to enumerate, defaults to the current domain.',
'Required' : False,
'Value' : ''
},
'DomainController' : {
'Description' : 'Domain controller to reflect LDAP queries through.',
'Required' : False,
'Value' : ''
},
'LDAP' : {
'Description' : 'Switch. Use LDAP queries to enumerate the trusts instead of direct domain connections.',
'Required' : False,
'Value' : ''
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self):
moduleName = self.info["Name"]
# read in the common powerview.ps1 module source code
moduleSource = self.mainMenu.installPath + "/data/module_source/situational_awareness/network/powerview.ps1"
try:
f = open(moduleSource, 'r')
except:
print helpers.color("[!] Could not read module source path at: " + str(moduleSource))
return ""
moduleCode = f.read()
f.close()
# get just the code needed for the specified function
script = helpers.generate_dynamic_powershell_script(moduleCode, moduleName)
script += moduleName + " "
for option,values in self.options.iteritems():
if option.lower() != "agent":
if values['Value'] and values['Value'] != '':
if values['Value'].lower() == "true":
# if we're just adding a switch
script += " -" + str(option)
else:
script += " -" + str(option) + " " + str(values['Value'])
script += ' | Out-String | %{$_ + \"`n\"};"`n'+str(moduleName)+' completed!"'
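# Illustrative result (assuming only the Domain option is set, e.g. to "corp.local"):
# the generated script ends with roughly
#   Get-NetDomainTrust -Domain corp.local | Out-String | %{$_ + "`n"};"`nGet-NetDomainTrust completed!"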
return script
| 33.71
| 125
| 0.484129
|
4dd78d536a740b3441bd4416dd1c301f9a51e503
| 2,420
|
py
|
Python
|
docs/conf.py
|
monotropauniflora/PartSeg
|
68ccf8df4162174056028cbb677ab729e4c31994
|
[
"BSD-3-Clause"
] | null | null | null |
docs/conf.py
|
monotropauniflora/PartSeg
|
68ccf8df4162174056028cbb677ab729e4c31994
|
[
"BSD-3-Clause"
] | null | null | null |
docs/conf.py
|
monotropauniflora/PartSeg
|
68ccf8df4162174056028cbb677ab729e4c31994
|
[
"BSD-3-Clause"
] | null | null | null |
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import PartSeg
# -- Project information -----------------------------------------------------
project = "PartSeg"
copyright = "2019, Laboratory of Functional and Structural Genomics"
author = "Grzegorz Bokota (LFSG)"
# The full version, including alpha/beta/rc tags
release = PartSeg.__version__
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
"sphinx_qt_documentation",
"sphinx.ext.viewcode",
"sphinx.ext.graphviz",
"PartSegCore.sphinx.auto_parameters",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "nature"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
master_doc = "index"
intersphinx_mapping = {
"python": ("https://docs.python.org/3", None),
"PyQt5": ("https://www.riverbankcomputing.com/static/Docs/PyQt5", None),
"Numpy": ("https://docs.scipy.org/doc/numpy/", None),
}
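# With these mappings, cross-references such as :class:`python:int` (and Qt classes,
# resolved via the sphinx_qt_documentation extension) link to the external inventories above.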
qt_documentation = "Qt5"
| 33.150685
| 79
| 0.667769
|
8a91f2f48d59fec92dc4fc869f5dc04e9cc53e39
| 3,321
|
py
|
Python
|
tests/test_falconx_sandbox.py
|
kra-ts/falconpy
|
c7c4ed93cb3b56cdfd86757f573fde57e4ccf857
|
[
"Unlicense"
] | null | null | null |
tests/test_falconx_sandbox.py
|
kra-ts/falconpy
|
c7c4ed93cb3b56cdfd86757f573fde57e4ccf857
|
[
"Unlicense"
] | null | null | null |
tests/test_falconx_sandbox.py
|
kra-ts/falconpy
|
c7c4ed93cb3b56cdfd86757f573fde57e4ccf857
|
[
"Unlicense"
] | null | null | null |
"""
test_falconx_sandbox.py - This class tests the falconx_sandbox service class
"""
import os
import sys
import pytest
# Authentication via the test_authorization.py
from tests import test_authorization as Authorization
# Import our sibling src folder into the path
sys.path.append(os.path.abspath('src'))
# Classes to test - manually imported from sibling folder
from falconpy import FalconXSandbox
auth = Authorization.TestAuthorization()
config = auth.getConfigObject()
falcon = FalconXSandbox(auth_object=config)
AllowedResponses = [200, 201, 400, 403, 404, 429] # Adding rate-limiting as an allowed response for now
class TestFalconXSandbox:
"""
Test Harness for the Falcon X Sandbox Service Class
"""
def falconx_generate_errors(self):
"""
Executes every statement in every method of the class, accepts all errors except 500
"""
error_checks = True
tests = {
"get_artifacts": falcon.GetArtifacts(parameters={}),
"get_summary_reports": falcon.GetSummaryReports(ids='12345678'),
"get_reports": falcon.GetReports(ids='12345678'),
"delete_report": falcon.DeleteReport(ids='12345678'),
"get_submissions": falcon.GetSubmissions(ids='12345678'),
"submit": falcon.Submit(document_password="banana",
enable_tor=False,
environment_id=300,
send_email_notifications=False,
user_tags="apples,bananas"
),
"query_reports": falcon.QueryReports(),
"query_submissions": falcon.QuerySubmissions(),
"get_sample": falcon.GetSampleV2(ids='12345678'),
"upload_sample": falcon.UploadSampleV2(body={}, parameters={}, file_data=''),
"delete_sample": falcon.DeleteSampleV2(ids='12345678'),
"query_sample": falcon.QuerySampleV1(sha256s='12345678')
}
for key in tests:
if tests[key]["status_code"] not in AllowedResponses:
error_checks = False
# print(f"{key} operation returned {tests[key]}")
return error_checks
def test_query_reports(self):
"""Pytest harness hook"""
assert bool(falcon.QueryReports(parameters={"limit": 1})["status_code"] in AllowedResponses) is True
def test_query_submissions(self):
"""Pytest harness hook"""
assert bool(falcon.QuerySubmissions(parameters={"limit": 1})["status_code"] in AllowedResponses) is True
@pytest.mark.skipif(falcon.QueryReports(parameters={"limit": 1})["status_code"] == 429, reason="API rate limit reached")
def test_get_summary_reports(self):
"""Pytest harness hook"""
id_lookup = falcon.query_reports(limit=1)
id_list = "1234567890"
if id_lookup["status_code"] not in [403, 404, 429]:
if id_lookup["body"]["resources"]:
id_list = id_lookup["body"]["resources"]
assert bool(falcon.GetSummaryReports(
ids=id_list
)["status_code"] in AllowedResponses) is True
def test_errors(self):
"""Pytest harness hook"""
assert self.falconx_generate_errors() is True
| 41
| 124
| 0.628425
|
9a19be8040981a73bfe71311bc242e1730dbdf9f
| 208
|
py
|
Python
|
primer/auth/urls.py
|
jamesmfriedman/django-primer
|
83dfc516212741d7593d1f6adeb774f4d30a4ad7
|
[
"Apache-2.0"
] | 2
|
2018-04-23T06:44:20.000Z
|
2019-02-05T16:43:17.000Z
|
primer/auth/urls.py
|
jamesmfriedman/django-primer
|
83dfc516212741d7593d1f6adeb774f4d30a4ad7
|
[
"Apache-2.0"
] | null | null | null |
primer/auth/urls.py
|
jamesmfriedman/django-primer
|
83dfc516212741d7593d1f6adeb774f4d30a4ad7
|
[
"Apache-2.0"
] | 2
|
2020-10-27T05:26:11.000Z
|
2021-03-24T18:10:20.000Z
|
from django.conf.urls import patterns, include, url
urlpatterns = patterns('primer.auth.views',
# accounts
url(r'^login/$', 'login', name='login'),
url(r'^logout/$', 'logout', name='logout'),
)
| 23.111111
| 51
| 0.639423
|
0a30a951d662eb3ccfd1fcb973a09688823053bb
| 1,447
|
py
|
Python
|
shortener/views.py
|
videetssinghai/URL_Shortner
|
14872c3c512c86b95491195e829b2148bb5b41c0
|
[
"MIT"
] | null | null | null |
shortener/views.py
|
videetssinghai/URL_Shortner
|
14872c3c512c86b95491195e829b2148bb5b41c0
|
[
"MIT"
] | null | null | null |
shortener/views.py
|
videetssinghai/URL_Shortner
|
14872c3c512c86b95491195e829b2148bb5b41c0
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, get_object_or_404
from django.http import request
from django.http import HttpResponse, HttpResponseRedirect
from .forms import urlForm
from .models import getBit
from django.views import View
class BasicView(View):
def get(self,request,*args,**kwargs):
form = urlForm()
context={
"form":form,
"title":"getBit"
}
return render(request,"shortner/home.html",context)
def post(self,request,*args,**kwargs):
form = urlForm(request.POST)
context = {
"form": form,
"title": "getBit.com"
}
template = "shortner/home.html"
if form.is_valid():
url = request.POST.get('url')
# noinspection PyUnresolvedReferences
obj, created = getBit.objects.get_or_create(url=url)
context = {
"object": obj,
"created": created
}
print request.POST.get('url')
if created:
print "created"
template = "shortner/success.html"
else :
print "no created"
template = "shortner/exists.html"
print template
return render(request,template,context)
def redirect_view(request,code=None,*args,**kwargs):
obj = get_object_or_404(getBit,shortcode=code)
return HttpResponseRedirect(obj.url)
| 28.372549
| 64
| 0.577747
|
b938266bd543a34027a698925fb555f726fcb856
| 16,306
|
py
|
Python
|
InvenTree/company/models.py
|
pmburu/InvenTree
|
2c1d585f8bcc6a961e323e0249755bb3968a4b5a
|
[
"MIT"
] | null | null | null |
InvenTree/company/models.py
|
pmburu/InvenTree
|
2c1d585f8bcc6a961e323e0249755bb3968a4b5a
|
[
"MIT"
] | null | null | null |
InvenTree/company/models.py
|
pmburu/InvenTree
|
2c1d585f8bcc6a961e323e0249755bb3968a4b5a
|
[
"MIT"
] | null | null | null |
"""
Company database model definitions
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import math
from decimal import Decimal
from django.utils.translation import gettext_lazy as _
from django.core.validators import MinValueValidator
from django.db import models
from django.db.models import Sum, Q
from django.apps import apps
from django.urls import reverse
from markdownx.models import MarkdownxField
from stdimage.models import StdImageField
from InvenTree.helpers import getMediaUrl, getBlankImage, getBlankThumbnail
from InvenTree.helpers import normalize
from InvenTree.fields import InvenTreeURLField, RoundingDecimalField
from InvenTree.status_codes import PurchaseOrderStatus
from common.models import Currency
def rename_company_image(instance, filename):
""" Function to rename a company image after upload
Args:
instance: Company object
filename: uploaded image filename
Returns:
New image filename
"""
base = 'company_images'
if filename.count('.') > 0:
ext = filename.split('.')[-1]
else:
ext = ''
fn = 'company_{pk}_img'.format(pk=instance.pk)
if ext:
fn += '.' + ext
return os.path.join(base, fn)
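# Illustrative behaviour: for a Company with pk=7 and an uploaded file "logo.png",
# the stored path becomes "company_images/company_7_img.png".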
class Company(models.Model):
""" A Company object represents an external company.
It may be a supplier or a customer or a manufacturer (or a combination)
- A supplier is a company from which parts can be purchased
- A customer is a company to which parts can be sold
- A manufacturer is a company which manufactures a raw good (they may or may not be a "supplier" also)
Attributes:
name: Brief name of the company
description: Longer form description
website: URL for the company website
address: Postal address
phone: contact phone number
email: contact email address
link: Secondary URL e.g. for link to internal Wiki page
image: Company image / logo
notes: Extra notes about the company
is_customer: boolean value, is this company a customer
is_supplier: boolean value, is this company a supplier
is_manufacturer: boolean value, is this company a manufacturer
"""
class Meta:
ordering = ['name', ]
name = models.CharField(max_length=100, blank=False, unique=True,
help_text=_('Company name'),
verbose_name=_('Company name'))
description = models.CharField(max_length=500, verbose_name=_('Company description'), help_text=_('Description of the company'))
website = models.URLField(blank=True, verbose_name=_('Website'), help_text=_('Company website URL'))
address = models.CharField(max_length=200,
verbose_name=_('Address'),
blank=True, help_text=_('Company address'))
phone = models.CharField(max_length=50,
verbose_name=_('Phone number'),
blank=True, help_text=_('Contact phone number'))
email = models.EmailField(blank=True, verbose_name=_('Email'), help_text=_('Contact email address'))
contact = models.CharField(max_length=100,
verbose_name=_('Contact'),
blank=True, help_text=_('Point of contact'))
link = InvenTreeURLField(blank=True, help_text=_('Link to external company information'))
image = StdImageField(
upload_to=rename_company_image,
null=True,
blank=True,
variations={'thumbnail': (128, 128)},
delete_orphans=True,
)
notes = MarkdownxField(blank=True)
is_customer = models.BooleanField(default=False, help_text=_('Do you sell items to this company?'))
is_supplier = models.BooleanField(default=True, help_text=_('Do you purchase items from this company?'))
is_manufacturer = models.BooleanField(default=False, help_text=_('Does this company manufacture parts?'))
def __str__(self):
""" Get string representation of a Company """
return "{n} - {d}".format(n=self.name, d=self.description)
def get_absolute_url(self):
""" Get the web URL for the detail view for this Company """
return reverse('company-detail', kwargs={'pk': self.id})
def get_image_url(self):
""" Return the URL of the image for this company """
if self.image:
return getMediaUrl(self.image.url)
else:
return getBlankImage()
def get_thumbnail_url(self):
""" Return the URL for the thumbnail image for this Company """
if self.image:
return getMediaUrl(self.image.thumbnail.url)
else:
return getBlankThumbnail()
@property
def manufactured_part_count(self):
""" The number of parts manufactured by this company """
return self.manufactured_parts.count()
@property
def has_manufactured_parts(self):
return self.manufactured_part_count > 0
@property
def supplied_part_count(self):
""" The number of parts supplied by this company """
return self.supplied_parts.count()
@property
def has_supplied_parts(self):
""" Return True if this company supplies any parts """
return self.supplied_part_count > 0
@property
def parts(self):
""" Return SupplierPart objects which are supplied or manufactured by this company """
return SupplierPart.objects.filter(Q(supplier=self.id) | Q(manufacturer=self.id))
@property
def part_count(self):
""" The number of parts manufactured (or supplied) by this Company """
return self.parts.count()
@property
def has_parts(self):
return self.part_count > 0
@property
def stock_items(self):
""" Return a list of all stock items supplied or manufactured by this company """
stock = apps.get_model('stock', 'StockItem')
return stock.objects.filter(Q(supplier_part__supplier=self.id) | Q(supplier_part__manufacturer=self.id)).all()
@property
def stock_count(self):
""" Return the number of stock items supplied or manufactured by this company """
return self.stock_items.count()
def outstanding_purchase_orders(self):
""" Return purchase orders which are 'outstanding' """
return self.purchase_orders.filter(status__in=PurchaseOrderStatus.OPEN)
def pending_purchase_orders(self):
""" Return purchase orders which are PENDING (not yet issued) """
return self.purchase_orders.filter(status=PurchaseOrderStatus.PENDING)
def closed_purchase_orders(self):
""" Return purchase orders which are not 'outstanding'
- Complete
- Failed / lost
- Returned
"""
return self.purchase_orders.exclude(status__in=PurchaseOrderStatus.OPEN)
def complete_purchase_orders(self):
return self.purchase_orders.filter(status=PurchaseOrderStatus.COMPLETE)
def failed_purchase_orders(self):
""" Return any purchase orders which were not successful """
return self.purchase_orders.filter(status__in=PurchaseOrderStatus.FAILED)
class Contact(models.Model):
""" A Contact represents a person who works at a particular company.
A Company may have zero or more associated Contact objects.
Attributes:
company: Company link for this contact
name: Name of the contact
phone: contact phone number
email: contact email
role: position in company
"""
company = models.ForeignKey(Company, related_name='contacts',
on_delete=models.CASCADE)
name = models.CharField(max_length=100)
phone = models.CharField(max_length=100, blank=True)
email = models.EmailField(blank=True)
role = models.CharField(max_length=100, blank=True)
class SupplierPart(models.Model):
""" Represents a unique part as provided by a Supplier
Each SupplierPart is identified by an MPN (Manufacturer Part Number)
Each SupplierPart is also linked to a Part object.
A Part may be available from multiple suppliers
Attributes:
part: Link to the master Part
supplier: Company that supplies this SupplierPart object
SKU: Stock keeping unit (supplier part number)
manufacturer: Company that manufactures the SupplierPart (leave blank if it is the same as the Supplier!)
MPN: Manufacturer part number
link: Link to external website for this part
description: Descriptive notes field
note: Longer form note field
base_cost: Base charge added to order independent of quantity e.g. "Reeling Fee"
multiple: Multiple that the part is provided in
lead_time: Supplier lead time
packaging: packaging that the part is supplied in, e.g. "Reel"
"""
def get_absolute_url(self):
return reverse('supplier-part-detail', kwargs={'pk': self.id})
class Meta:
unique_together = ('part', 'supplier', 'SKU')
# This model was moved from the 'Part' app
db_table = 'part_supplierpart'
part = models.ForeignKey('part.Part', on_delete=models.CASCADE,
related_name='supplier_parts',
verbose_name=_('Base Part'),
limit_choices_to={
'purchaseable': True,
'is_template': False,
},
help_text=_('Select part'),
)
supplier = models.ForeignKey(Company, on_delete=models.CASCADE,
related_name='supplied_parts',
limit_choices_to={'is_supplier': True},
help_text=_('Select supplier'),
)
SKU = models.CharField(max_length=100, help_text=_('Supplier stock keeping unit'))
manufacturer = models.ForeignKey(
Company,
on_delete=models.SET_NULL,
related_name='manufactured_parts',
limit_choices_to={'is_manufacturer': True},
help_text=_('Select manufacturer'),
null=True, blank=True
)
MPN = models.CharField(max_length=100, blank=True, help_text=_('Manufacturer part number'))
link = InvenTreeURLField(blank=True, help_text=_('URL for external supplier part link'))
description = models.CharField(max_length=250, blank=True, help_text=_('Supplier part description'))
note = models.CharField(max_length=100, blank=True, help_text=_('Notes'))
base_cost = models.DecimalField(max_digits=10, decimal_places=3, default=0, validators=[MinValueValidator(0)], help_text=_('Minimum charge (e.g. stocking fee)'))
packaging = models.CharField(max_length=50, blank=True, help_text=_('Part packaging'))
multiple = models.PositiveIntegerField(default=1, validators=[MinValueValidator(1)], help_text=_('Order multiple'))
# TODO - Reimplement lead-time as a charfield with special validation (pattern matching).
# lead_time = models.DurationField(blank=True, null=True)
@property
def manufacturer_string(self):
""" Format a MPN string for this SupplierPart.
Concatenates manufacture name and part number.
"""
items = []
if self.manufacturer:
items.append(self.manufacturer.name)
if self.MPN:
items.append(self.MPN)
return ' | '.join(items)
@property
def has_price_breaks(self):
return self.price_breaks.count() > 0
@property
def price_breaks(self):
""" Return the associated price breaks in the correct order """
return self.pricebreaks.order_by('quantity').all()
@property
def unit_pricing(self):
return self.get_price(1)
def get_price(self, quantity, moq=True, multiples=True):
""" Calculate the supplier price based on quantity price breaks.
- Don't forget to add in flat-fee cost (base_cost field)
- If MOQ (minimum order quantity) is required, bump quantity
- If order multiples are to be observed, then we need to calculate based on that, too
"""
price_breaks = self.price_breaks.filter(quantity__lte=quantity)
# No price break information available?
if len(price_breaks) == 0:
return None
# Order multiples
if multiples:
quantity = int(math.ceil(quantity / self.multiple) * self.multiple)
pb_found = False
pb_quantity = -1
pb_cost = 0.0
for pb in self.price_breaks.all():
# Ignore this pricebreak (quantity is too high)
if pb.quantity > quantity:
continue
pb_found = True
# If this price-break quantity is the largest so far, use it!
if pb.quantity > pb_quantity:
pb_quantity = pb.quantity
# Convert everything to base currency
pb_cost = pb.converted_cost
if pb_found:
cost = pb_cost * quantity
return normalize(cost + self.base_cost)
else:
return None
def open_orders(self):
""" Return a database query for PO line items for this SupplierPart,
limited to purchase orders that are open / outstanding.
"""
return self.purchase_order_line_items.prefetch_related('order').filter(order__status__in=PurchaseOrderStatus.OPEN)
def on_order(self):
""" Return the total quantity of items currently on order.
Subtract partially received stock as appropriate
"""
totals = self.open_orders().aggregate(Sum('quantity'), Sum('received'))
# Quantity on order
q = totals.get('quantity__sum', 0)
# Quantity received
r = totals.get('received__sum', 0)
if q is None or r is None:
return 0
else:
return max(q - r, 0)
def purchase_orders(self):
""" Returns a list of purchase orders relating to this supplier part """
return [line.order for line in self.purchase_order_line_items.all().prefetch_related('order')]
@property
def pretty_name(self):
return str(self)
def __str__(self):
s = "{supplier} ({sku})".format(
sku=self.SKU,
supplier=self.supplier.name)
if self.manufacturer_string:
s = s + ' - ' + self.manufacturer_string
return s
class SupplierPriceBreak(models.Model):
""" Represents a quantity price break for a SupplierPart.
- Suppliers can offer discounts at larger quantities
- SupplierPart(s) may have zero-or-more associated SupplierPriceBreak(s)
Attributes:
part: Link to a SupplierPart object that this price break applies to
quantity: Quantity required for price break
cost: Cost at specified quantity
currency: Reference to the currency of this pricebreak (leave empty for base currency)
"""
part = models.ForeignKey(SupplierPart, on_delete=models.CASCADE, related_name='pricebreaks')
quantity = RoundingDecimalField(max_digits=15, decimal_places=5, default=1, validators=[MinValueValidator(1)])
cost = RoundingDecimalField(max_digits=10, decimal_places=5, validators=[MinValueValidator(0)])
currency = models.ForeignKey(Currency, blank=True, null=True, on_delete=models.SET_NULL)
@property
def converted_cost(self):
""" Return the cost of this price break, converted to the base currency """
scaler = Decimal(1.0)
if self.currency:
scaler = self.currency.value
return self.cost * scaler
class Meta:
unique_together = ("part", "quantity")
# This model was moved from the 'Part' app
db_table = 'part_supplierpricebreak'
def __str__(self):
return "{mpn} - {cost} @ {quan}".format(
mpn=self.part.MPN,
cost=self.cost,
quan=self.quantity)
| 34.112971
| 165
| 0.644855
|
e31ce9749adc905c2d892c375863b55d51643cad
| 2,860
|
py
|
Python
|
invoicing/crud/job_crud.py
|
dnegreira/Invoicing
|
0bc8133e989f095c10151f67482e249416274947
|
[
"MIT"
] | null | null | null |
invoicing/crud/job_crud.py
|
dnegreira/Invoicing
|
0bc8133e989f095c10151f67482e249416274947
|
[
"MIT"
] | null | null | null |
invoicing/crud/job_crud.py
|
dnegreira/Invoicing
|
0bc8133e989f095c10151f67482e249416274947
|
[
"MIT"
] | null | null | null |
from invoicing.actions.action_collection import ActionCollection
from invoicing.crud.base_crud import BaseCrud
from invoicing.models.job_model import JobModel
from invoicing.repository.job_repository import JobRepository
from invoicing.repository.status_repository import StatusRepository
from invoicing.ui.menu import Menu
from invoicing.ui.pagination import Pagination
from invoicing.ui.style import Style
from invoicing.value_validation.value_validation import Validation
class JobCrud(BaseCrud):
def __init__(self):
super().__init__('Jobs', JobRepository, JobModel)
def show_item_menu(self, id):
Menu.create(self.table_name + ' Menu', ActionCollection(
('Update Status', lambda: self.update_status(id)),
('Log Time', lambda: self.log_time(id))
))
def make_paginated_menu(self):
return self.paginated_menu(
find=self.repository.find_paginated_join_staff_and_status,
find_by_id=lambda id: self.repository.find_by_id(id, (
'id', 'reference_code', 'title', 'description',
'estimated_time', 'actual_time', 'deadline',
'assigned_to', 'status_id', 'project_id'
))
)
def log_time(self, job_id):
logged_time = ''
while not Validation.isFloat(logged_time):
logged_time = input('Log Time: ')
self.repository.update_actual_time(job_id, logged_time)
self.repository.save()
self.repository.check_rows_updated('Job Updated')
Menu.wait_for_input()
def update_status(self, job_id):
statusRepository = StatusRepository()
paginated_menu = Pagination(statusRepository)
status = paginated_menu(
find=statusRepository.find_paginated,
find_by_id=statusRepository.find_by_id
)
self.repository.update(job_id, {
'status_id': status['id'],
})
self.repository.save()
self.repository.check_rows_updated('Status Updated')
Menu.wait_for_input()
def edit_billable_time(self, job):
print("Estimated Time: " + job['estimated_time'])
print("Actual Time: " + job['actual_time'])
billable_time = input("Billable Time: ")
self.repository.update_billable_time(job['id'], billable_time)
self.repository.save()
self.repository.check_rows_updated('Job Updated')
Menu.wait_for_input()
def show_jobs_by_assigned_to(self, staff_id):
print(Style.create_title('Select job to log time'))
job = self.paginated_menu(
find=lambda limit, page: self.repository.find_paginated_by_assigned_to(staff_id, limit, page),
find_by_id=self.repository.find_by_id
)
if job:
self.show_item_detail(job)
self.show_item_menu(job['id'])
| 39.178082
| 106
| 0.669231
|
358f602b7443b9c2cfc585ae6a93b0b483df5251
| 12,979
|
py
|
Python
|
bitfield/__init__.py
|
mattcaldwell/django-bitfield
|
c7fbbfa79a61e68a7bce246a62b65ca5a4edf27f
|
[
"Apache-2.0"
] | 1
|
2015-11-04T16:24:57.000Z
|
2015-11-04T16:24:57.000Z
|
bitfield/__init__.py
|
mattcaldwell/django-bitfield
|
c7fbbfa79a61e68a7bce246a62b65ca5a4edf27f
|
[
"Apache-2.0"
] | null | null | null |
bitfield/__init__.py
|
mattcaldwell/django-bitfield
|
c7fbbfa79a61e68a7bce246a62b65ca5a4edf27f
|
[
"Apache-2.0"
] | null | null | null |
"""
django-bitfield
~~~~~~~~~~~~~~~
"""
try:
VERSION = __import__('pkg_resources') \
.get_distribution('bitfield').version
except Exception:
VERSION = 'unknown'
from django import forms
from django.db.models.sql.expressions import SQLEvaluator
from django.db.models.fields import Field, BigIntegerField
from django.db.models.fields.subclassing import Creator, SubfieldBase
class Bit(object):
"""
Represents a single Bit.
"""
def __init__(self, number, is_set=True):
self.number = number
self.is_set = bool(is_set)
self.mask = 2**int(number)
if not self.is_set:
self.mask = ~self.mask
def __repr__(self):
return '<%s: number=%d, is_set=%s>' % (self.__class__.__name__, self.number, self.is_set)
# def __str__(self):
# if self.is_set:
# return 'Yes'
# return 'No'
def __int__(self):
return self.mask
def __nonzero__(self):
return self.is_set
def __eq__(self, value):
if isinstance(value, Bit):
return value.number == self.number and value.is_set == self.is_set
elif isinstance(value, bool):
return value == self.is_set
elif isinstance(value, int):
return value == self.mask
return value == self.is_set
def __ne__(self, value):
return not self == value
def __coerce__(self, value):
return (self.is_set, bool(value))
def __invert__(self):
return self.__class__(self.number, not self.is_set)
def __and__(self, value):
if isinstance(value, Bit):
value = value.mask
return value & self.mask
def __rand__(self, value):
if isinstance(value, Bit):
value = value.mask
return self.mask & value
def __or__(self, value):
if isinstance(value, Bit):
value = value.mask
return value | self.mask
def __ror__(self, value):
if isinstance(value, Bit):
value = value.mask
return self.mask | value
def __lshift__(self, value):
if isinstance(value, Bit):
value = value.mask
return value << self.mask
def __rlshift__(self, value):
if isinstance(value, Bit):
value = value.mask
return self.mask << value
def __rshift__(self, value):
if isinstance(value, Bit):
value = value.mask
return value >> self.mask
def __rrshift__(self, value):
if isinstance(value, Bit):
value = value.mask
return self.mask >> value
def __xor__(self, value):
if isinstance(value, Bit):
value = value.mask
return value ^ self.mask
def __rxor__(self, value):
if isinstance(value, Bit):
value = value.mask
return self.mask ^ value
def __sentry__(self):
return repr(self)
def prepare(self, evaluator, query, allow_joins):
return self
def evaluate(self, evaluator, qn, connection):
return self.mask, []
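# --- Illustrative usage sketch (not part of the original module) ---
# Bit(number) carries mask = 2 ** number; inverting it yields a Bit whose mask is the
# bitwise complement, so AND-ing against a plain integer clears that flag.
def _bit_usage_example():
    flag = Bit(2)                        # mask == 4
    assert int(flag) == 4
    assert (0b0110 & flag) == 4          # bit 2 is set in 0b0110
    cleared = ~flag                      # Bit(2, is_set=False), mask == ~4
    assert (0b0110 & cleared) == 0b0010  # AND with the complement clears bit 2
    return cleared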
class BitHandler(object):
"""
Represents an array of bits, each as a ``Bit`` object.
"""
def __init__(self, value, keys):
# TODO: change to bitarray?
if value:
self._value = int(value)
else:
self._value = 0
self._keys = keys
def __eq__(self, other):
if not isinstance(other, BitHandler):
return False
return self._value == other._value
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, ', '.join('%s=%s' % (k, self.get_bit(n).is_set) for n, k in enumerate(self._keys)),)
def __str__(self):
return str(self._value)
def __int__(self):
return self._value
def __nonzero__(self):
return bool(self._value)
def __and__(self, value):
return BitHandler(self._value & int(value), self._keys)
def __or__(self, value):
return BitHandler(self._value | int(value), self._keys)
def __add__(self, value):
return BitHandler(self._value + int(value), self._keys)
def __sub__(self, value):
return BitHandler(self._value - int(value), self._keys)
def __lshift__(self, value):
return BitHandler(self._value << int(value), self._keys)
def __rshift__(self, value):
return BitHandler(self._value >> int(value), self._keys)
def __xor__(self, value):
return BitHandler(self._value ^ int(value), self._keys)
def __contains__(self, key):
bit_number = self._keys.index(key)
return bool(self.get_bit(bit_number))
def __getattr__(self, key):
if key.startswith('_'):
return object.__getattribute__(self, key)
if key not in self._keys:
raise AttributeError('%s is not a valid flag' % key)
return self.get_bit(self._keys.index(key))
def __setattr__(self, key, value):
if key.startswith('_'):
return object.__setattr__(self, key, value)
if key not in self._keys:
raise AttributeError('%s is not a valid flag' % key)
self.set_bit(self._keys.index(key), value)
def __iter__(self):
return self.iteritems()
def __sentry__(self):
return repr(self)
def _get_mask(self):
return self._value
mask = property(_get_mask)
def prepare(self, evaluator, query, allow_joins):
return self
def evaluate(self, evaluator, qn, connection):
return self.mask, []
def get_bit(self, bit_number):
mask = 2**int(bit_number)
return Bit(bit_number, self._value & mask != 0)
def set_bit(self, bit_number, true_or_false):
mask = 2**int(bit_number)
if true_or_false:
self._value |= mask
else:
self._value &= (~mask)
return Bit(bit_number, self._value & mask != 0)
def keys(self):
return self._keys
def iterkeys(self):
return iter(self._keys)
def items(self):
return list(self.iteritems())
def iteritems(self):
for k in self._keys:
yield (k, getattr(self, k).is_set)
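# --- Illustrative usage sketch (not part of the original module) ---
# BitHandler pairs an integer value with named flags: attribute reads return Bit
# objects, attribute writes flip the corresponding bit in the underlying value.
def _bithandler_usage_example():
    handler = BitHandler(0, ['read', 'write', 'execute'])
    handler.write = True                  # sets bit 1, so the value becomes 2
    assert int(handler) == 2
    assert handler.write.is_set and not handler.read.is_set
    assert 'write' in handler
    return handler.items()                # [('read', False), ('write', True), ('execute', False)]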
class BitFormField(forms.IntegerField):
def __init__(self, *args, **kwargs):
super(BitFormField, self).__init__(*args, **kwargs)
def clean(self, value):
value = int(value)
return super(BitFormField, self).clean(value)
class BitFieldFlags(object):
def __init__(self, flags):
self._flags = flags
def __repr__(self):
return repr(self._flags)
def __iter__(self):
for flag in self._flags:
yield flag
def __getattr__(self, key):
if key not in self._flags:
raise AttributeError
return Bit(self._flags.index(key))
def iteritems(self):
for flag in self._flags:
yield flag, Bit(self._flags.index(flag))
def iterkeys(self):
for flag in self._flags:
yield flag
def itervalues(self):
for flag in self._flags:
yield Bit(self._flags.index(flag))
def items(self):
return list(self.iteritems())
def keys(self):
return list(self.iterkeys())
def values(self):
return list(self.itervalues())
class BitFieldCreator(Creator):
"""
Descriptor for BitFields. Checks to make sure that all flags of the
instance match the class. This is to handle the case when caching
an older version of the instance and a newer version of the class is
available (usually during deploys).
"""
def __get__(self, obj, type=None):
if obj is None:
return BitFieldFlags(self.field.flags)
retval = obj.__dict__[self.field.name]
if self.field.__class__ is BitField:
# Update flags from class in case they've changed.
retval._keys = self.field.flags
return retval
class BitQueryLookupWrapper(object):
def __init__(self, alias, column, bit):
self.table_alias = alias
self.column = column
self.bit = bit
def as_sql(self, qn, connection=None):
"""
Create the proper SQL fragment. This inserts something like
"(T0.flags & value) != 0".
This will be called by Where.as_sql()
"""
if self.bit:
return ("(%s.%s | %d)" % (qn(self.table_alias), qn(self.column), self.bit.mask),
[])
return ("(%s.%s & %d)" % (qn(self.table_alias), qn(self.column), self.bit.mask),
[])
class BitQuerySaveWrapper(BitQueryLookupWrapper):
def as_sql(self, qn, connection):
"""
Create the proper SQL fragment. This inserts something like
"(T0.flags & value) != 0".
This will be called by Where.as_sql()
"""
engine = connection.settings_dict['ENGINE'].rsplit('.', -1)[-1]
if engine.startswith('postgres'):
XOR_OPERATOR = '#'
elif engine.startswith('sqlite'):
raise NotImplementedError
else:
XOR_OPERATOR = '^'
if self.bit:
return ("%s.%s | %d" % (qn(self.table_alias), qn(self.column), self.bit.mask),
[])
return ("%s.%s %s %d" % (qn(self.table_alias), qn(self.column), XOR_OPERATOR, self.bit.mask),
[])
class BitFieldMeta(SubfieldBase):
"""
Modified SubFieldBase to use our contribute_to_class method (instead of
monkey-patching make_contrib). This uses our BitFieldCreator descriptor
in place of the default.
NOTE: If we find ourselves needing custom descriptors for fields, we could
make this generic.
"""
def __new__(cls, name, bases, attrs):
def contribute_to_class(self, cls, name):
BigIntegerField.contribute_to_class(self, cls, name)
setattr(cls, self.name, BitFieldCreator(self))
new_class = super(BitFieldMeta, cls).__new__(cls, name, bases, attrs)
new_class.contribute_to_class = contribute_to_class
return new_class
class BitField(BigIntegerField):
__metaclass__ = BitFieldMeta
def __init__(self, flags, *args, **kwargs):
BigIntegerField.__init__(self, *args, **kwargs)
self.flags = flags
def south_field_triple(self):
"Returns a suitable description of this field for South."
from south.modelsinspector import introspector
field_class = "django.db.models.fields.BigIntegerField"
args, kwargs = introspector(self)
return (field_class, args, kwargs)
def formfield(self, form_class=BitFormField, **kwargs):
return Field.formfield(self, form_class, **kwargs)
def pre_save(self, instance, add):
value = getattr(instance, self.attname)
return value
def get_prep_value(self, value):
if isinstance(value, (BitHandler, Bit)):
value = value.mask
return int(value)
# def get_db_prep_save(self, value, connection):
# if isinstance(value, Bit):
# return BitQuerySaveWrapper(self.model._meta.db_table, self.name, value)
# return super(BitField, self).get_db_prep_save(value, connection=connection)
def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False):
if isinstance(value, SQLEvaluator) and isinstance(value.expression, Bit):
value = value.expression
if isinstance(value, (BitHandler, Bit)):
return BitQueryLookupWrapper(self.model._meta.db_table, self.name, value)
return BigIntegerField.get_db_prep_lookup(self, lookup_type=lookup_type, value=value,
connection=connection, prepared=prepared)
def get_prep_lookup(self, lookup_type, value):
if isinstance(value, SQLEvaluator) and isinstance(value.expression, Bit):
value = value.expression
if isinstance(value, Bit):
if lookup_type in ('exact',):
return value
raise TypeError('Lookup type %r not supported with `Bit` type.' % lookup_type)
return BigIntegerField.get_prep_lookup(self, lookup_type, value)
def to_python(self, value):
if isinstance(value, Bit):
value = value.mask
if not isinstance(value, BitHandler):
# Regression for #1425: fix bad data that was created resulting
# in negative values for flags. Compute the value that would
            # have been visible to the application to preserve compatibility.
if isinstance(value, (int, long)) and value < 0:
new_value = 0
for bit_number, _ in enumerate(self.flags):
new_value |= (value & (2**bit_number))
value = new_value
value = BitHandler(value, self.flags)
else:
# Ensure flags are consistent for unpickling
value._keys = self.flags
return value
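# --- Illustrative usage sketch (not part of the original module) ---
# A model would declare the field with a tuple of flag names (the model name below is hypothetical):
#
#   class MyModel(models.Model):
#       flags = BitField(flags=('is_active', 'is_verified', 'is_staff'), default=0)
#
# Class-level access (MyModel.flags.is_active) goes through BitFieldCreator and yields a Bit
# usable in queries, while instance access returns a BitHandler keyed by the same flag names.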
| 31.350242
| 138
| 0.608367
|
6bab7798f0876b96b83cb9947c420e6c6293ea7b
| 8,775
|
py
|
Python
|
KDEBoundaries1D.py
|
jabamar/KernelDensityBoundaries1D
|
5016ab240a06c0c5ac9ff2ce335481053b40445d
|
[
"BSD-3-Clause"
] | null | null | null |
KDEBoundaries1D.py
|
jabamar/KernelDensityBoundaries1D
|
5016ab240a06c0c5ac9ff2ce335481053b40445d
|
[
"BSD-3-Clause"
] | null | null | null |
KDEBoundaries1D.py
|
jabamar/KernelDensityBoundaries1D
|
5016ab240a06c0c5ac9ff2ce335481053b40445d
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Kernel Density Estimation (only 1D)
-------------------------
"""
from sklearn.base import BaseEstimator
from scipy.interpolate import interp1d
import numpy as np
from scipy import stats
KERNELS = ['gaussian', 'tophat', 'epanechnikov', 'expo', 'linear']
BOUNDARY = ['reflection', 'CowlingHall']
MINIMUM_VALUE = 1e-40
def GAUSSKERN(xi, mean, bw, npts):
""" GAUSSIAN KERNEL
Parameters
__________
xi: float
x range value
mean: float or list of floats
mean values for the gaussian (values of the sample)
bw: float
        bandwidth
npts: integer
number of points
"""
return np.divide(np.sum(stats.norm.pdf(xi, loc=mean, scale=bw)), npts)
def TOPHATKERN(xi, val, bw, npts):
return np.divide((np.abs(xi - val) < bw).sum(), npts*bw*2)
def EPANECHNIKOVKERN(xi, val, bw, npts):
temparray = np.abs(xi - val)
temparray = temparray[temparray < bw]
return np.divide(np.sum(1-(np.power(temparray/bw, 2))), npts*bw*4/3)
def EXPOKERN(xi, val, bw, npts):
return np.divide(np.sum(np.exp(-np.abs(xi-val)/bw)), npts*bw*2)
def LINEARKERN(xi, val, bw, npts):
temparray = np.abs(xi - val)
temparray = temparray[temparray < bw]
return np.divide(np.sum(1-(temparray)/bw), npts*bw)
class KernelDensityBoundaries1D(BaseEstimator):
"""A Kernel Density Estimator with boundary corrections
Parameters
----------
kernel : string
kernel to use. Default: gaussian
range: list of floats
List of dimension 2 [Xmin, Xmax] which gives the minimum/maximum value
of the distribution to estimate
    bandwidth: float
Smoothing parameter for the kernel
boundary: string
if None, it calculates the KDE ignoring any boundary condition
if "reflection", it applies a reflection at both ends of range (see
http://www.ton.scphys.kyoto-u.ac.jp/~shino/toolbox/reflectedkernel/reflectedkernel.html).
If "CowlingHall", The Cowling and Hall method as shown in
DOI: 10.1103/PhysRevD.97.115047 (original at
ttps://www.jstor.org/stable/2345893 ) is performed.
n_approx : int
number of points for the spline. A spline is used if n_approx >= 2,
otherwise this is ignored.
"""
def __init__(self, kernel="gaussian", range=None, bandwidth=1,
boundary=None, n_approx=-1):
"""
Called when initializing the classifier
"""
self.kernel = kernel
self.range = range
self.n_approx = n_approx
self.bandwidth = bandwidth
self.boundary = boundary
self.cdf_values = None
self.xrangecdf = None
if self.kernel not in KERNELS:
raise RuntimeError("Kernel not valid!")
if self.boundary is not None and self.boundary not in BOUNDARY:
raise RuntimeError("Boundary condition not valid!")
if self.n_approx >= 2 and self.range is None:
raise RuntimeError("Provide a valid boundary for the spline")
self.do_spline = True if self.n_approx >= 2 else False
def fit(self, X, y=None):
"""
Fit the 1D Kernel Density Estimator
Parameters
----------
X : array_like, shape (n_samples, n_features=1)
List of 1-dimensional data points. Each row
corresponds to a single data point.
"""
if X.shape[1] != 1:
raise RuntimeError("only valid for 1D!")
self.Xvalues_ = X.copy()
# Generate Pseudodata points for Cowling-Hall
if self.boundary == "CowlingHall":
Xmin, Xmax = self.range
Npts = int((self.Xvalues_.shape[0])/3)
sortpts = np.sort(self.Xvalues_.copy(), axis=0)
self.Xpseudodata_ = 4*Xmin - 6*sortpts[:Npts] + \
4*sortpts[1:2*Npts:2] - sortpts[2:3*Npts:3]
if self.n_approx >= 2:
Xmin, Xmax = self.range
if Xmin >= Xmax:
raise RuntimeError("Xmin must be smaller than Xmax!")
Xvals = np.linspace(Xmin, Xmax, self.n_approx)
self.interpol_ = interp1d(Xvals,
[self.eval(xi, False) for xi in Xvals],
kind="cubic", fill_value=MINIMUM_VALUE,
bounds_error=False)
return self
def eval(self, xi, usespline=False):
""" Evaluate point by point
Parameters
----------
xi : float
point where the density is evaluated
usespline : bool
whether the spline is considered or not
"""
KERNEL = None
samplevalues = self.Xvalues_
bw = self.bandwidth
npts = self.Xvalues_.shape[0]
# If spline, we return the interpolation and we skip the rest
if usespline:
return np.float32(self.interpol_(xi))
# Xmin, Xmax = self.range
# if xi >= Xmin and xi <= Xmax:
# return float(self.interpol_(xi))
# else:
# return MINIMUM_VALUE
# Choose kernel
if self.kernel == "gaussian":
KERNEL = GAUSSKERN
elif self.kernel == "tophat":
KERNEL = TOPHATKERN
elif self.kernel == "epanechnikov":
KERNEL = EPANECHNIKOVKERN
elif self.kernel == "expo":
KERNEL = EXPOKERN
elif self.kernel == "linear":
KERNEL = LINEARKERN
if self.boundary is None:
returnval = KERNEL(xi, samplevalues, bw, npts)
elif self.boundary == "reflection":
Xmin, Xmax = self.range
if xi >= Xmin and xi <= Xmax:
returnval = KERNEL(xi, samplevalues, bw, npts) + \
KERNEL(2*Xmin - xi, samplevalues, bw, npts) + \
KERNEL(2*Xmax - xi, samplevalues, bw, npts)
else:
returnval = MINIMUM_VALUE
elif self.boundary == "CowlingHall":
if xi >= Xmin and xi <= Xmax:
returnval = KERNEL(xi, samplevalues, bw, npts) + \
KERNEL(xi, self.Xpseudodata_, bw, npts)
else:
returnval = MINIMUM_VALUE
return returnval if returnval > MINIMUM_VALUE else MINIMUM_VALUE
def score_samples(self, X, y=None):
"""Evaluate the density model on the data.
Parameters
----------
X : array_like, shape (n_samples, n_features=1)
An array of points to query. Last dimension must be 1.
Returns
-------
density : ndarray, shape (n_samples,)
The array of density evaluations.
"""
if self.do_spline:
return self.interpol_(X)
else:
return [self.eval(xi, False) for xi in X]
def score(self, X, y=None):
"""Evaluates the total log probability for the array X
(as done in the sklearn class)
Parameters
----------
X : array_like, shape (n_samples, n_features=1)
An array of points to query. Last dimension must be 1.
"""
return(sum(np.log(self.score_samples(X))))
def cdf(self, npoints=100, xrange=None):
"""Generates the cumulative density function
Parameters
----------
npoints : number of points used to calculate the cdf.
Default is 100
xrange : points for which the cdf is calculated
(Optional, default is None)
returns two arrays:
self.cdf_values contains the cdf values for the points in
self.xrangecdf (which is also returned)
"""
        if self.Xvalues_ is None:
raise RuntimeError("KDE has not been fitted!")
if xrange is None:
Xmin, Xmax = self.range
xrange = np.linspace(Xmin, Xmax, npoints)
else:
npoints = len(xrange)
Xmin = xrange[0]
Xmax = xrange[npoints-1]
binwidth = (Xmax-Xmin)/npoints
self.cdf_values = np.cumsum(self.score_samples(xrange[:, np.newaxis]))*binwidth
self.cdf_values /= self.cdf_values[npoints-1]
self.xrangecdf = xrange
return self.cdf_values, self.xrangecdf
def generate_random(self, size=1, nxpoints=1000):
"""Generates random numbers from the KDE
Parameters
----------
size: Number of values to be generated
"""
if self.cdf_values is None or self.xrangecdf is None:
self.cdf(npoints=nxpoints)
val_uniform = np.random.uniform(size=size)
corresponding_bins = np.searchsorted(self.cdf_values, val_uniform)
return self.xrangecdf[corresponding_bins]
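# --- Illustrative usage sketch (not part of the original module) ---
# Fit the boundary-corrected KDE on a 1D sample of shape (n_samples, 1) and query it.
# Kernel, bandwidth and range below are arbitrary assumptions, not recommended defaults.
def _kde_usage_example():
    rng = np.random.RandomState(0)
    X = np.abs(rng.normal(size=(200, 1)))      # data bounded below by zero
    kde = KernelDensityBoundaries1D(kernel="gaussian", bandwidth=0.2,
                                    range=[0.0, 4.0], boundary="reflection")
    kde.fit(X)
    densities = kde.score_samples(np.linspace(0.0, 4.0, 5))
    samples = kde.generate_random(size=10)
    return densities, samples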
| 30.154639
| 97
| 0.57641
|
1fd43303ee0209f20c60ceb3f6c1efa66ea53d7f
| 8,302
|
py
|
Python
|
src/daain/data/datasets/labels/bdd100k.py
|
merantix/mxlabs-daain
|
0e87df5dd6e678939374dfadf44fc360d34425bb
|
[
"Apache-2.0"
] | 14
|
2021-06-01T08:34:09.000Z
|
2022-02-01T15:45:27.000Z
|
src/daain/data/datasets/labels/bdd100k.py
|
merantix/mxlabs-daain
|
0e87df5dd6e678939374dfadf44fc360d34425bb
|
[
"Apache-2.0"
] | null | null | null |
src/daain/data/datasets/labels/bdd100k.py
|
merantix/mxlabs-daain
|
0e87df5dd6e678939374dfadf44fc360d34425bb
|
[
"Apache-2.0"
] | 2
|
2021-07-31T01:58:40.000Z
|
2022-01-24T14:04:26.000Z
|
"""Label definition.
Taken from https://raw.githubusercontent.com/bdd100k/bdd100k/master/bdd100k/label/label.py on the 2021-02-03
"""
from collections import namedtuple
# a label and all meta information
# Code inspired by Cityscapes https://github.com/mcordts/cityscapesScripts
Label = namedtuple(
"Label",
[
"name", # The identifier of this label, e.g. 'car', 'person', ... .
# We use them to uniquely name a class
"id", # An integer ID that is associated with this label.
# The IDs are used to represent the label in ground truth images An ID
# of -1 means that this label does not have an ID and thus is ignored
# when creating ground truth images (e.g. license plate). Do not modify
# these IDs, since exactly these IDs are expected by the evaluation
# server.
"trainId",
# Feel free to modify these IDs as suitable for your method. Then
        # create ground truth images with train IDs, using the tools provided
# in the 'preparation' folder. However, make sure to validate or submit
# results to our evaluation server using the regular IDs above! For
# trainIds, multiple labels might have the same ID. Then, these labels
# are mapped to the same class in the ground truth images. For the
# inverse mapping, we use the label that is defined first in the list
# below. For example, mapping all void-type classes to the same ID in
# training, might make sense for some approaches. Max value is 255!
"category", # The name of the category that this label belongs to
"categoryId",
# The ID of this category. Used to create ground truth images
# on category level.
"hasInstances",
# Whether this label distinguishes between single instances or not
"ignoreInEval",
# Whether pixels having this class as ground truth label are ignored
# during evaluations or not
"color", # The color of this label
],
)
# Our extended list of label types. Our train id is compatible with Cityscapes
labels = [
# name id trainId category catId
# hasInstances ignoreInEval color
Label("unlabeled", 0, 255, "void", 0, False, True, (0, 0, 0)),
Label("dynamic", 1, 255, "void", 0, False, True, (111, 74, 0)),
Label("ego vehicle", 2, 255, "void", 0, False, True, (0, 0, 0)),
Label("ground", 3, 255, "void", 0, False, True, (81, 0, 81)),
Label("static", 4, 255, "void", 0, False, True, (0, 0, 0)),
Label("parking", 5, 255, "flat", 1, False, True, (250, 170, 160)),
Label("rail track", 6, 255, "flat", 1, False, True, (230, 150, 140)),
Label("road", 7, 0, "flat", 1, False, False, (128, 64, 128)),
Label("sidewalk", 8, 1, "flat", 1, False, False, (244, 35, 232)),
Label("bridge", 9, 255, "construction", 2, False, True, (150, 100, 100)),
Label("building", 10, 2, "construction", 2, False, False, (70, 70, 70)),
Label("fence", 11, 4, "construction", 2, False, False, (190, 153, 153)),
Label("garage", 12, 255, "construction", 2, False, True, (180, 100, 180)),
Label("guard rail", 13, 255, "construction", 2, False, True, (180, 165, 180)),
Label("tunnel", 14, 255, "construction", 2, False, True, (150, 120, 90)),
Label("wall", 15, 3, "construction", 2, False, False, (102, 102, 156)),
Label("banner", 16, 255, "object", 3, False, True, (250, 170, 100)),
Label("billboard", 17, 255, "object", 3, False, True, (220, 220, 250)),
Label("lane divider", 18, 255, "object", 3, False, True, (255, 165, 0)),
Label("parking sign", 19, 255, "object", 3, False, False, (220, 20, 60)),
Label("pole", 20, 5, "object", 3, False, False, (153, 153, 153)),
Label("polegroup", 21, 255, "object", 3, False, True, (153, 153, 153)),
Label("street light", 22, 255, "object", 3, False, True, (220, 220, 100)),
Label("traffic cone", 23, 255, "object", 3, False, True, (255, 70, 0)),
Label("traffic device", 24, 255, "object", 3, False, True, (220, 220, 220)),
Label("traffic light", 25, 6, "object", 3, False, False, (250, 170, 30)),
Label("traffic sign", 26, 7, "object", 3, False, False, (220, 220, 0)),
Label("traffic sign frame", 27, 255, "object", 3, False, True, (250, 170, 250),),
Label("terrain", 28, 9, "nature", 4, False, False, (152, 251, 152)),
Label("vegetation", 29, 8, "nature", 4, False, False, (107, 142, 35)),
Label("sky", 30, 10, "sky", 5, False, False, (70, 130, 180)),
Label("person", 31, 11, "human", 6, True, False, (220, 20, 60)),
Label("rider", 32, 12, "human", 6, True, False, (255, 0, 0)),
Label("bicycle", 33, 18, "vehicle", 7, True, False, (119, 11, 32)),
Label("bus", 34, 15, "vehicle", 7, True, False, (0, 60, 100)),
Label("car", 35, 13, "vehicle", 7, True, False, (0, 0, 142)),
Label("caravan", 36, 255, "vehicle", 7, True, True, (0, 0, 90)),
Label("motorcycle", 37, 17, "vehicle", 7, True, False, (0, 0, 230)),
Label("trailer", 38, 255, "vehicle", 7, True, True, (0, 0, 110)),
Label("trainer", 39, 16, "vehicle", 7, True, False, (0, 80, 100)),
Label("truck", 40, 14, "vehicle", 7, True, False, (0, 0, 70)),
]
# --------------------------------------------------------------------------------
# Create dictionaries for a fast lookup
# --------------------------------------------------------------------------------
# Please refer to the main method below for example usages!
# name to label object
name2label = {label.name: label for label in labels}
# id to label object
id2label = {label.id: label for label in labels}
# trainId to label object
trainId2label = {label.trainId: label for label in reversed(labels)}
# category to list of label objects
category2labels = {}
for label in labels:
category = label.category
if category in category2labels:
category2labels[category].append(label)
else:
category2labels[category] = [label]
id2trainId = {label.id: label.trainId for label in labels}
# --------------------------------------------------------------------------------
# Main for testing
# --------------------------------------------------------------------------------
# just a dummy main
if __name__ == "__main__":
# Print all the labels
print("List of cityscapes labels:")
print("")
print(
" {:>21} | {:>3} | {:>7} | {:>14} | {:>10} | {:>12} | {:>12}".format(
"name", "id", "trainId", "category", "categoryId", "hasInstances", "ignoreInEval"
),
)
print(" " + ("-" * 98))
for label in labels:
print(
" {:>21} | {:>3} | {:>7} | {:>14} | {:>10} | {:>12} | {:>12}".format(
label.name,
label.id,
label.trainId,
label.category,
label.categoryId,
label.hasInstances,
label.ignoreInEval,
),
)
print("")
print("Example usages:")
# Map from name to label
name = "car"
id = name2label[name].id
print("ID of label '{name}': {id}".format(name=name, id=id))
# Map from ID to label
category = id2label[id].category
print("Category of label with ID '{id}': {category}".format(id=id, category=category))
# Map from trainID to label
trainId = 0
name = trainId2label[trainId].name
print("Name of label with trainID '{id}': {name}".format(id=trainId, name=name))
# Number of non-background labels
print(f"Number of non-background labels {len([l for l in labels if l.trainId != 255])}")
# Check compatibility with Cityscape dataset
from research_projects.shared.datasets.labels.cityscape import name2label as cs_name2label
def bdd100k_label_compatible_with_cityscape(label_):
""" Basic check if they map the trainId to the same values, and if not it should be the background trainId
"""
if label_.name in cs_name2label:
return label_.trainId == cs_name2label[label_.name].trainId
else:
return label_.trainId == 255
if all(bdd100k_label_compatible_with_cityscape(label_) for label_ in labels):
print("They are compatible")
else:
print("They are not compatible")
| 46.379888
| 114
| 0.581908
|
c603b28f1d8b84176ecc82fd6eb472a0104bf3dc
| 2,053
|
py
|
Python
|
python/tvm/relax/__init__.py
|
jwfromm/relax
|
f120282007778706199243ee88b50697c2b9550c
|
[
"Apache-2.0"
] | null | null | null |
python/tvm/relax/__init__.py
|
jwfromm/relax
|
f120282007778706199243ee88b50697c2b9550c
|
[
"Apache-2.0"
] | null | null | null |
python/tvm/relax/__init__.py
|
jwfromm/relax
|
f120282007778706199243ee88b50697c2b9550c
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name, wrong-import-position
"""The Relax IR namespace containing the IR, type, operator, and builder."""
from . import exec_builder
from . import expr
from . import ty
from . import vm
from . import block_builder
from . import op
from . import analysis
from . import transform
# Expr
Expr = expr.Expr
Span = expr.Span
SourceName = expr.SourceName
Id = expr.Id
GlobalVar = expr.GlobalVar
Var = expr.Var
DataflowVar = expr.DataflowVar
Binding = expr.Binding
MatchShape = expr.MatchShape
VarBinding = expr.VarBinding
BindingBlock = expr.BindingBlock
DataflowBlock = expr.DataflowBlock
SeqExpr = expr.SeqExpr
ShapeExpr = expr.ShapeExpr
Tuple = expr.Tuple
TupleGetItem = expr.TupleGetItem
Function = expr.Function
ExternFunc = expr.ExternFunc
Call = expr.Call
If = expr.If
# helper functions
const = expr.const
extern = expr.extern
te_tensor = expr.te_tensor
# Type
Type = ty.Type
ShapeType = ty.ShapeType
DynTensorType = ty.DynTensorType
DimType = ty.DimType
# VM
ExecBuilder = exec_builder.ExecBuilder
VirtualMachine = vm.VirtualMachine
load_exec_from_file = vm.load_exec_from_file
# Operator
from .op.base import call_tir
from .op.op_attrs import AllocStorageAttrs, AllocTensorAttrs
# IRBuilder
BlockBuilder = block_builder.BlockBuilder
| 28.513889
| 76
| 0.783244
|
ba0a26999c3f9d297560212a7a23dd90183bc511
| 781
|
py
|
Python
|
dialogos/quotes/models.py
|
bertucho/epic-movie-quotes-quiz
|
09e4ec58a441ab74c1ce6e0fde4e71b08a4d7250
|
[
"MIT"
] | null | null | null |
dialogos/quotes/models.py
|
bertucho/epic-movie-quotes-quiz
|
09e4ec58a441ab74c1ce6e0fde4e71b08a4d7250
|
[
"MIT"
] | null | null | null |
dialogos/quotes/models.py
|
bertucho/epic-movie-quotes-quiz
|
09e4ec58a441ab74c1ce6e0fde4e71b08a4d7250
|
[
"MIT"
] | null | null | null |
# Stdlib imports
from django.db import models
# Create your models here.
class Movie(models.Model):
title = models.CharField(max_length=200)
original_title = models.CharField(max_length=200,default="")
year = models.IntegerField()
imdb_score = models.DecimalField(max_digits = 3, decimal_places = 1)
votes = models.IntegerField()
posterPath = models.CharField(max_length=200)
def __unicode__(self):
return self.title
class Quote(models.Model):
movie = models.ForeignKey(Movie)
text = models.CharField(max_length=1024)
mf_visits = models.IntegerField()
score = models.DecimalField(max_digits = 4, decimal_places = 1)
popularity_rank = models.DecimalField(max_digits = 6, decimal_places = 1)
level = models.IntegerField()
def __unicode__(self):
return self.text
| 31.24
| 74
| 0.768246
|
5d875fbe12335e40ce998b3093d4a4ee930a278c
| 3,910
|
py
|
Python
|
tools/android/aar_resources_extractor.py
|
FengRillian/bazel-0.10-dist
|
67b66fa37dad709340d676feef0b655774bad5f6
|
[
"Apache-2.0"
] | null | null | null |
tools/android/aar_resources_extractor.py
|
FengRillian/bazel-0.10-dist
|
67b66fa37dad709340d676feef0b655774bad5f6
|
[
"Apache-2.0"
] | null | null | null |
tools/android/aar_resources_extractor.py
|
FengRillian/bazel-0.10-dist
|
67b66fa37dad709340d676feef0b655774bad5f6
|
[
"Apache-2.0"
] | 1
|
2021-05-15T20:45:59.000Z
|
2021-05-15T20:45:59.000Z
|
# pylint: disable=g-direct-third-party-import
# Copyright 2017 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A tool for extracting resource files from an AAR.
An AAR may contain resources under the /res directory. This tool extracts all
of the resources into a directory. If no resources exist, it creates an
empty.xml file that defines no resources.
In the future, this script may be extended to also extract assets.
"""
import os
import sys
import zipfile
from tools.android import junction
from third_party.py import gflags
FLAGS = gflags.FLAGS
gflags.DEFINE_string("input_aar", None, "Input AAR")
gflags.MarkFlagAsRequired("input_aar")
gflags.DEFINE_string("output_res_dir", None, "Output resources directory")
gflags.MarkFlagAsRequired("output_res_dir")
def ExtractResources(aar, output_res_dir):
"""Extract resource from an `aar` file to the `output_res_dir` directory."""
aar_contains_no_resources = True
output_res_dir_abs = os.path.abspath(output_res_dir)
for name in aar.namelist():
if name.startswith("res/"):
if os.name == "nt":
fullpath = os.path.normpath(os.path.join(output_res_dir_abs, name))
if name[-1] == "/":
# The zip entry is a directory. Create a junction to it, which also
# takes care of creating the directory and all of its parents in a
# longpath-safe manner.
# We must pretend to have extracted this directory, even if it's
# empty, therefore we mustn't rely on creating it as a parent
# directory of a subsequently extracted zip entry (because there may
# be no such subsequent entry).
with junction.TempJunction(fullpath.rstrip("/")) as juncpath:
pass
else:
# The zip entry is a file. Create a junction to its parent directory,
# then open the compressed entry as a file object, so we can extract
# the data even if the extracted file's path would be too long.
# The tradeoff is that we lose the permission bits of the compressed
# file, but Unix permissions don't mean much on Windows anyway.
with junction.TempJunction(os.path.dirname(fullpath)) as juncpath:
extracted_path = os.path.join(juncpath, os.path.basename(fullpath))
with aar.open(name) as src_fd:
with open(extracted_path, "wb") as dest_fd:
dest_fd.write(src_fd.read())
else:
aar.extract(name, output_res_dir)
aar_contains_no_resources = False
if aar_contains_no_resources:
empty_xml_filename = output_res_dir + "/res/values/empty.xml"
if os.name == "nt":
# Create a junction to the parent directory, because its path might be too
# long. Creating the junction also creates all parent directories.
with junction.TempJunction(os.path.dirname(empty_xml_filename)) as junc:
xmlpath = os.path.join(junc, os.path.basename(empty_xml_filename))
with open(xmlpath, "wb") as empty_xml:
empty_xml.write(b"<resources/>")
else:
os.makedirs(os.path.dirname(empty_xml_filename))
with open(empty_xml_filename, "wb") as empty_xml:
empty_xml.write(b"<resources/>")
def main():
with zipfile.ZipFile(FLAGS.input_aar, "r") as aar:
ExtractResources(aar, FLAGS.output_res_dir)
if __name__ == "__main__":
FLAGS(sys.argv)
main()
| 41.157895
| 80
| 0.702558
|
8451af28a5abee0f1ff3a1c465c0ba6734ea0421
| 10,108
|
py
|
Python
|
core/plugins/kickban.py
|
TheArchives/Nexus
|
5482def8b50562fdbae980cda9b1708bfad8bffb
|
[
"BSD-2-Clause"
] | 1
|
2021-04-06T18:54:31.000Z
|
2021-04-06T18:54:31.000Z
|
core/plugins/kickban.py
|
TheArchives/Nexus
|
5482def8b50562fdbae980cda9b1708bfad8bffb
|
[
"BSD-2-Clause"
] | null | null | null |
core/plugins/kickban.py
|
TheArchives/Nexus
|
5482def8b50562fdbae980cda9b1708bfad8bffb
|
[
"BSD-2-Clause"
] | 1
|
2021-12-20T18:11:25.000Z
|
2021-12-20T18:11:25.000Z
|
# The Nexus software is licensed under the BSD 2-Clause license.
#
# You should have recieved a copy of this license with the software.
# If you did not, you can find one at the following link.
#
# http://opensource.org/licenses/bsd-license.php
import logging
from core.plugins import ProtocolPlugin
from core.decorators import *
from core.constants import *
from core.server import *
class KickBanPlugin(ProtocolPlugin):
commands = {
"ban": "commandBan",
"banb": "commandBanBoth",
"ipban": "commandIpban",
"ipreason": "commandIpreason",
"kick": "commandKick",
"mkick": "commandMassKick",
"masskick": "commandMassKick",
"banreason": "commandReason",
"unban": "commandUnban",
"unipban": "commandUnipban",
"banned": "commandBanned",
"freeze": "commandFreeze",
"stop": "commandFreeze",
"unfreeze": "commandUnFreeze",
"defreeze": "commandUnFreeze",
"unstop": "commandUnFreeze",
#"ipshun": "commandIpshun",
#"unipshun": "commandUnipshun",
#"ipspec": "commandIpshun",
#"unipspec": "commandUnipshun",
}
@player_list
@admin_only
def commandBanned(self, parts, fromloc, overriderank):
"/banned [page] - Admin\nShows who is Banned."
if len(parts)==2:
try:
page = int(parts[1])
except ValueError:
self.client.sendServerMessage("Page must be a Number.")
return
else:
page = 1
bannedNames = []
for element in self.client.factory.banned.keys():
bannedNames.append(element)
if len(bannedNames) > 0:
bannedNames.sort()
self.client.sendServerPagedList("Banned:", bannedNames, page)
else:
self.client.sendServerList(["Banned: No one."])
@player_list
@mod_only
@username_command
def commandKick(self, user, fromloc, overriderank, params=[]):
"/kick username [reason] - Mod\nKicks the user off the server."
reason = " ".join(params)
user.sendErrorAction(ACTION_KICK, self.client, reason)
self.client.announceGlobal(ACTION_KICK, user.username, reason)
self.client.sendServerMessage("User %s kicked." % user.username)
@player_list
@director_only
def commandMassKick(self, parts, fromloc, overriderank):
"/mkick - Director\nKicks all users off the server."
for user in self.client.factory.usernames:
if user.lower() != self.client.username.lower():
self.client.factory.usernames[user].sendError("%s kicked everyone!" % self.client.username)
self.client.factory.queue.put((self.client, TASK_SERVERURGENTMESSAGE, "[MASSKICK] %s kicked everyone." % self.client.username))
@player_list
@director_only
@only_username_command
def commandBanBoth(self, username, fromloc, overriderank, params=[]):
"/banb username reason - Director\nName and IP ban a user from this server."
if not params:
self.client.sendServerMessage("Please give a reason.")
else:
if username in self.client.factory.usernames:
self.commandIpban(["/banb", username] + params, fromloc, overriderank)
self.commandBan(["/banb", username] + params, fromloc, overriderank)
@player_list
@admin_only
def commandBan(self, parts, fromloc, overriderank):
"/ban username reason - Admin\nBans the Player from this server."
        if len(parts) <= 2:
            self.client.sendServerMessage("Please specify a username and a reason.")
            return
        username = parts[1].lower()
if self.client.factory.isBanned(username):
self.client.sendServerMessage("%s is already banned." % username)
else:
reason = " ".join(parts[2:])
self.client.factory.addBan(username, reason)
if username in self.client.factory.usernames:
self.client.factory.usernames[username].sendErrorAction(ACTION_BAN, self.client, reason)
self.client.announceGlobal(ACTION_BAN, username, reason)
self.client.sendServerMessage("%s has been banned for %s." % (username, reason))
@director_only
def commandIpban(self, parts, fromloc, overriderank):
"/ipban username reason - Director\nBan a user's IP from this server."
if len(parts) >= 2:
username = parts[1].lower()
if username in self.client.factory.usernames:
ip = self.client.factory.usernames[username].transport.getPeer().host
if self.client.factory.isIpBanned(ip):
self.client.sendServerMessage("%s is already IPBanned." % ip)
else:
reason = " ".join(parts[2:])
self.client.factory.addIpBan(ip, reason)
self.client.factory.usernames[username].sendErrorAction(ACTION_IPBAN, self.client, reason)
self.client.announceGlobal(ACTION_IPBAN, username, reason)
self.client.sendServerMessage("%s has been IPBanned." % ip)
else:
self.client.sendServerMessage("%s is not online." % username)
else:
self.client.sendServerMessage("Please include a user to IPBan.")
@player_list
@admin_only
@only_username_command
def commandUnban(self, username, fromloc, overriderank):
"/unban username - Admin\nRemoves the Ban on the user."
if not self.client.factory.isBanned(username):
self.client.sendServerMessage("%s is not banned." % username)
else:
self.client.factory.removeBan(username)
self.client.announceGlobal(ACTION_UNBAN, username)
self.client.sendServerMessage("%s has been unbanned." % username)
@player_list
@director_only
@only_string_command("IP")
def commandUnipban(self, ip, fromloc, overriderank):
"/unipban ip - Director\nRemoves the Ban on the IP."
if not self.client.factory.isIpBanned(ip):
self.client.sendServerMessage("%s is not Banned." % ip)
else:
self.client.factory.removeIpBan(ip)
self.client.sendServerMessage("%s UnBanned." % ip)
@player_list
@admin_only
@only_username_command
def commandReason(self, username, fromloc, overriderank):
"/banreason username - Admin\nGives the reason a user was Banned."
if not self.client.factory.isBanned(username):
self.client.sendServerMessage("%s is not Banned." % username)
else:
self.client.sendServerMessage("Reason: %s" % self.client.factory.banReason(username))
@player_list
@director_only
@only_string_command("IP")
def commandIpreason(self, ip, fromloc, overriderank):
"/ipreason username - Director\nGives the reason an IP was Banned."
if not self.client.factory.isIpBanned(ip):
self.client.sendServerMessage("%s is not Banned." % ip)
else:
self.client.sendServerMessage("Reason: %s" % self.client.factory.ipBanReason(ip))
@player_list
@mod_only
def commandUnFreeze(self, parts, fromloc, overriderank):
"/unfreeze username - Mod\nAliases: defreeze, unstop\nUnfreezes the user, allowing them to move again."
try:
username = parts[1]
except:
self.client.sendServerMessage("No username given.")
return
try:
user = self.client.factory.usernames[username]
except:
self.client.sendServerMessage("User is not online.")
return
user.frozen = False
self.client.sendServerMessage("%s has been unfrozen." % username)
user.sendNormalMessage("&4You have been unfrozen by %s!" % self.client.username)
@player_list
@mod_only
def commandFreeze(self, parts, fromloc, overriderank):
"/freeze username - Mod\nAliases: stop\nFreezes the user, preventing them from moving."
try:
username = parts[1]
except:
self.client.sendServerMessage("No username given.")
return
try:
user = self.client.factory.usernames[username]
except:
self.client.sendErrorMessage("User is not online.")
return
user.frozen = True
if self.client.isOnlyHiddenNotVisibleStaff():
user.sendNormalMessage("&4You have been frozen!")
else:
user.sendNormalMessage("&4You have been frozen by %s!" % self.client.username)
self.client.sendServerMessage("%s has been frozen." % username)
#@player_list
#@mod_only
#@only_username_command
#def commandIpshun(self, username, fromloc, overriderank):
# "/ipspec username - Mod\nAliases: ipshun\nIPSpec a user's IP in this server."
# ip = self.client.factory.usernames[username].transport.getPeer().host
# if self.client.factory.isIpShunned(ip):
# self.client.sendServerMessage("%s is already IPSpecced." % ip)
# else:
# self.client.factory.addIpShun(ip)
# if username in self.client.factory.usernames:
# self.client.factory.usernames[username].sendServerMessage("You got IPSpecced!")
# self.client.sendServerMessage("%s has been IPSpecced." % ip)
# logging.log(logging.INFO,self.client.username + ' IPSpecced ' + username + ip)
#@player_list
#@mod_only
#@only_string_command("IP")
#def commandUnipshun(self, ip, fromloc, overriderank):
# "/unipspec ip - Mod\nAliases: unipshun\nRemoves the IPSpec on the IP."
# if not self.client.factory.isIpShunned(ip):
# self.client.sendServerMessage("%s is not IPSpecced." % ip)
# else:
# self.client.factory.removeIpShun(ip)
# self.client.sendServerMessage("%s UnIPSpecced." % ip)
# logging.log(logging.INFO,self.client.username + ' UnIPSpecced ' + ip)
| 42.292887
| 135
| 0.626731
|
df3476e93e18c7bf312ee47e5dc84704f8526171
| 10,624
|
py
|
Python
|
convai_evaluation.py
|
BSlience/end2end-conversational-ai
|
30d17f8f9b439bd77e9593e99b9de47b4475aa7f
|
[
"MIT"
] | 4
|
2021-03-13T09:26:20.000Z
|
2022-03-29T12:38:40.000Z
|
convai_evaluation.py
|
BSlience/end2end-conversational-ai
|
30d17f8f9b439bd77e9593e99b9de47b4475aa7f
|
[
"MIT"
] | 1
|
2020-12-20T11:07:11.000Z
|
2020-12-20T11:07:11.000Z
|
convai_evaluation.py
|
BSlience/end2end-conversational-ai
|
30d17f8f9b439bd77e9593e99b9de47b4475aa7f
|
[
"MIT"
] | 3
|
2021-03-16T12:49:40.000Z
|
2021-06-29T17:15:52.000Z
|
# # Copyright (c) 2019-present, HuggingFace Inc.
# All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
import random
import logging
from pprint import pformat
from collections import defaultdict
from functools import partial
from tqdm import trange
import torch
import torch.nn.functional as F
from parlai.core.agents import Agent
from parlai.scripts.eval_model import setup_args as base_setup_args
from projects.convai2.eval_hits import eval_hits, setup_args as setup_args_hits
from projects.convai2.eval_f1 import eval_f1, setup_args as setup_args_f1
from projects.convai2.eval_ppl import eval_ppl, setup_args as setup_args_ppl
from projects.convai2.build_dict import build_dict
from transformers import (OpenAIGPTDoubleHeadsModel, OpenAIGPTLMHeadModel, OpenAIGPTTokenizer,
GPT2DoubleHeadsModel, GPT2LMHeadModel, GPT2Tokenizer)
from train import build_input_from_segments, pad_dataset, SPECIAL_TOKENS, add_special_tokens_
from utils import download_pretrained_model, AttrDict
from interact import sample_sequence
class TransformerAgent(Agent):
@staticmethod
def add_cmdline_args(argparser):
agent_args = argparser.add_argument_group('Agent parameters')
agent_args.add_argument("--model_checkpoint", type=str, default="",
help="Path, url or short name of the model. Must be OpenAIGPT.")
agent_args.add_argument("--max_history", type=int, default=2,
help="Number of previous utterances to keep in history")
agent_args.add_argument("--device", type=str, default="cuda" if torch.cuda.is_available() else "cpu",
help="Device (cuda or cpu)")
agent_args.add_argument("--eval_type", type=str, default="hits@1", help="hits@1, ppl or f1")
agent_args.add_argument("--no_sample", action='store_true')
agent_args.add_argument("--max_length", type=int, default=20)
agent_args.add_argument("--min_length", type=int, default=1)
agent_args.add_argument("--seed", type=int, default=0)
agent_args.add_argument("--temperature", type=int, default=0.7)
agent_args.add_argument("--top_k", type=int, default=20)
agent_args.add_argument("--top_p", type=float, default=0.9,
help="Nucleus filtering (top-p) before sampling (<=0.0: no filtering)")
return argparser
def __init__(self, opt, shared=None):
super(TransformerAgent, self).__init__(opt, shared)
args = AttrDict(opt) # to keep most commands identical to the interact.py script
self.args = args
logging.basicConfig(level=logging.INFO)
self.logger = logging.getLogger(__file__)
self.logger.info(pformat(args))
random.seed(args.seed)
torch.random.manual_seed(args.seed)
torch.cuda.manual_seed(args.seed)
if shared is None:
self.logger.info("Get pretrained model and tokenizer")
if args.model_checkpoint == "":
args.model_checkpoint = download_pretrained_model()
if 'gpt2' in args.model_checkpoint:
self.tokenizer = GPT2Tokenizer.from_pretrained(args.model_checkpoint)
model_class = GPT2DoubleHeadsModel if self.args.eval_type == "hits@1" else GPT2LMHeadModel
else:
self.tokenizer = OpenAIGPTTokenizer.from_pretrained(args.model_checkpoint)
model_class = OpenAIGPTDoubleHeadsModel if self.args.eval_type == "hits@1" else OpenAIGPTLMHeadModel
self.model_checkpoint = model_class.from_pretrained(args.model_checkpoint)
self.model_checkpoint.to(args.device)
self.logger.info("Build BPE prefix dictionary")
convai_dict = build_dict()
assert len(convai_dict) == 19304
self.prefix2words = self.get_prefix2words(convai_dict)
else:
self.model_checkpoint = shared['model']
self.tokenizer = shared['tokenizer']
self.prefix2words = shared['prefix2words']
add_special_tokens_(self.model_checkpoint, self.tokenizer)
self.special_tokens_ids = self.tokenizer.convert_tokens_to_ids(SPECIAL_TOKENS)
self.persona = []
self.history = []
self.labels = []
self.reset()
def observe(self, observation):
if self.episode_done:
self.reset()
if self.labels:
# Add the previous response to the history
self.history.append(self.labels)
if 'labels' in observation or 'eval_labels' in observation:
text = observation.get('labels', observation.get('eval_labels', [[]]))[0]
self.labels = self.tokenizer.encode(text)
if 'text' in observation:
text = observation['text']
for subtext in text.split('\n'):
subtext = subtext.strip()
if subtext.startswith('your persona:'):
subtext = subtext.replace('your persona:', '').strip()
self.persona.append(self.tokenizer.encode(subtext))
else:
self.history.append(self.tokenizer.encode(subtext))
self.history = self.history[-(2 * self.args.max_history + 1):]
candidates = []
if 'label_candidates' in observation:
for candidate in observation['label_candidates']:
candidates.append((self.tokenizer.encode(candidate), candidate))
self.candidates = candidates
self.episode_done = observation['episode_done']
self.observation = observation
return observation
def act(self):
reply = {}
if self.args.eval_type == "hits@1" and len(self.candidates) > 0:
instances = defaultdict(list)
for candidate, _ in self.candidates:
instance = build_input_from_segments(self.persona, self.history, candidate, self.tokenizer)
for input_name, input_array in instance.items():
instances[input_name].append(input_array)
inputs = pad_dataset(instances, padding=self.special_tokens_ids[-1])
tensor_inputs = {}
for input_name in ["input_ids", "mc_token_ids", "token_type_ids"]:
tensor = torch.tensor(inputs[input_name], device=self.args.device)
tensor = tensor.view((-1, len(self.candidates)) + tensor.shape[1:])
tensor_inputs[input_name] = tensor
with torch.no_grad():
mc_logits = self.model_checkpoint(**tensor_inputs)[1]
val, ind = torch.sort(mc_logits[0], descending=True)
ypred = self.candidates[ind[0].item()][1] # match
tc = []
for j in range(len(self.candidates)):
tc.append(self.candidates[ind[j].item()][1])
reply = {'text': ypred, 'text_candidates': tc}
else:
# We are in interactive of f1 evaluation mode => just sample
with torch.no_grad():
out_ids = sample_sequence(self.persona, self.history, self.tokenizer, self.model_checkpoint, self.args)
out_text = self.tokenizer.decode(out_ids, skip_special_tokens=True,
clean_up_tokenization_spaces=(self.args.eval_type != 'f1'))
reply = {'text': out_text}
return reply
def next_word_probability(self, partial_out):
"""Return probability distribution over next words given an input and
partial true output. This is used to calculate the per-word perplexity.
"""
partial_out_ids = self.tokenizer.encode(' '.join(partial_out))
instance = build_input_from_segments(self.persona, self.history, partial_out_ids,
self.tokenizer, with_eos=False)
input_ids = torch.tensor(instance["input_ids"], device=self.args.device).unsqueeze(0)
token_type_ids = torch.tensor(instance["token_type_ids"], device=self.args.device).unsqueeze(0)
with torch.no_grad():
logits = self.model_checkpoint(input_ids, token_type_ids=token_type_ids)
if isinstance(logits, tuple): # for gpt2 and maybe others
logits = logits[0]
probs = F.softmax(logits[0, -1], dim=0)
dist = {}
for prefix_id, words in self.prefix2words.items():
for word, ratio in words.items():
dist[word] = probs[prefix_id].item() * ratio
return dist
def get_prefix2words(self, convai_dict, smoothing_freq=5):
""" map BPE-prefix => dict(full_words beginning with BPE-prefix, associated words_counts) """
prefix2words = defaultdict(dict)
for i in trange(len(convai_dict)):
word = convai_dict[i]
freq = convai_dict.freq[word] + smoothing_freq
bpe_tokens = self.tokenizer.bpe(word).split(' ')
prefix_id = self.tokenizer.convert_tokens_to_ids(bpe_tokens[0])
prefix2words[prefix_id].update(dict([(word, freq)]))
for prefix_id, words in prefix2words.items():
total_counts = sum(words.values())
prefix2words[prefix_id] = dict((word, count / total_counts) for word, count in words.items())
return prefix2words
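    # Illustrative mapping (hypothetical words and counts): if dictionary entries "hello"
    # and "help" both start with the BPE token "he" and carry smoothed counts 15 and 5,
    # then prefix2words[<id of "he">] == {"hello": 0.75, "help": 0.25}, and
    # next_word_probability() scales P(next BPE token is "he") by exactly those ratios.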
def share(self):
shared = super(TransformerAgent, self).share()
shared['tokenizer'] = self.tokenizer
shared['model'] = self.model_checkpoint
shared['prefix2words'] = self.prefix2words
return shared
def reset(self):
self.persona = []
self.history = []
self.labels = []
self.candidates = []
self.episode_done = True
self.observation = None
if __name__ == '__main__':
parser = base_setup_args(None)
parser.set_params(
model='convai_evaluation:TransformerAgent')
opt = parser.parse_args(print_args=False)
if opt['eval_type'] == "hits@1":
setup_args = setup_args_hits(None)
eval_fct = partial(eval_hits, print_parser=setup_args)
elif opt['eval_type'] == "ppl":
setup_args = setup_args_ppl(None)
eval_fct = eval_ppl
elif opt['eval_type'] == "f1":
setup_args = setup_args_f1(None)
eval_fct = partial(eval_f1, print_parser=setup_args)
else:
raise ValueError
setup_args.set_params(
model='convai_evaluation:TransformerAgent')
opt = setup_args.parse_args(print_args=False)
eval_fct(opt)
| 43.363265
| 119
| 0.641566
|
123ee185ef9425a9e7f9993853380084fdb54c9a
| 3,005
|
py
|
Python
|
isi_sdk_8_1_1/isi_sdk_8_1_1/models/providers_ads.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 24
|
2018-06-22T14:13:23.000Z
|
2022-03-23T01:21:26.000Z
|
isi_sdk_8_1_1/isi_sdk_8_1_1/models/providers_ads.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 46
|
2018-04-30T13:28:22.000Z
|
2022-03-21T21:11:07.000Z
|
isi_sdk_8_1_1/isi_sdk_8_1_1/models/providers_ads.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 29
|
2018-06-19T00:14:04.000Z
|
2022-02-08T17:51:19.000Z
|
# coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 6
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from isi_sdk_8_1_1.models.providers_ads_ads_item import ProvidersAdsAdsItem # noqa: F401,E501
class ProvidersAds(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'ads': 'list[ProvidersAdsAdsItem]'
}
attribute_map = {
'ads': 'ads'
}
def __init__(self, ads=None): # noqa: E501
"""ProvidersAds - a model defined in Swagger""" # noqa: E501
self._ads = None
self.discriminator = None
if ads is not None:
self.ads = ads
@property
def ads(self):
"""Gets the ads of this ProvidersAds. # noqa: E501
:return: The ads of this ProvidersAds. # noqa: E501
:rtype: list[ProvidersAdsAdsItem]
"""
return self._ads
@ads.setter
def ads(self, ads):
"""Sets the ads of this ProvidersAds.
:param ads: The ads of this ProvidersAds. # noqa: E501
:type: list[ProvidersAdsAdsItem]
"""
self._ads = ads
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ProvidersAds):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 26.130435
| 94
| 0.557404
|
9dafdd9cdeac4bee1b653d307fc577355a1f55e9
| 113
|
py
|
Python
|
Exercise81.py
|
JBCFurtado/Rabiscos_Em_Python
|
a1a5be9391e1bbbb301b8a7776043f7ea77e24da
|
[
"MIT"
] | null | null | null |
Exercise81.py
|
JBCFurtado/Rabiscos_Em_Python
|
a1a5be9391e1bbbb301b8a7776043f7ea77e24da
|
[
"MIT"
] | null | null | null |
Exercise81.py
|
JBCFurtado/Rabiscos_Em_Python
|
a1a5be9391e1bbbb301b8a7776043f7ea77e24da
|
[
"MIT"
] | null | null | null |
#import random
#print(random.randrange(1, 7))  # randrange(1, 7) draws from 1-6
import random
# randint's upper bound is inclusive, so (1, 6) reproduces the six-sided-die
# behaviour of the commented-out randrange(1, 7) above.
for i in range(5):
    print(random.randint(1, 6))
| 14.125
| 31
| 0.690265
|
b98d0536459fe3bd039757a69d1a760f52ff1608
| 2,079
|
py
|
Python
|
tests/integration/exception_test.py
|
gglin001/popart
|
3225214343f6d98550b6620e809a3544e8bcbfc6
|
[
"MIT"
] | null | null | null |
tests/integration/exception_test.py
|
gglin001/popart
|
3225214343f6d98550b6620e809a3544e8bcbfc6
|
[
"MIT"
] | null | null | null |
tests/integration/exception_test.py
|
gglin001/popart
|
3225214343f6d98550b6620e809a3544e8bcbfc6
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2019 Graphcore Ltd. All rights reserved.
import numpy as np
import popart
import pytest
import tempfile
# `import test_util` requires adding to sys.path
import sys
from pathlib import Path
sys.path.append(str(Path(__file__).resolve().parent.parent))
import test_util as tu
# test that we can get the graph & summary report after an out of memory exception
# This test currently requires hardware, as the ipu model does not throw an exception
# when it run's out of memory
@tu.requires_ipu
def test_out_of_memory_exception():
deviceInfo = tu.create_test_device(1, tilesPerIPU=tu.USE_ALL_TILES)
builder = popart.Builder()
inputs = [
builder.addInputTensor(popart.TensorInfo("FLOAT", [2000, 2000]))
for i in range(8)
]
# Matmul every input against every other input
activations = []
for a in inputs:
for b in inputs:
c = builder.aiOnnx.matmul([a, b])
activations.append(c)
# Sum all the activations together
out = builder.aiOnnx.sum(activations)
builder.addOutputTensor(out)
options = popart.SessionOptions()
options.defaultPrefetchBufferingDepth = 1
tempDir = tempfile.TemporaryDirectory()
options.engineOptions = {
"debug.allowOutOfMemory": "true",
"autoReport.outputGraphProfile": "true",
"autoReport.directory": tempDir.name
}
patterns = popart.Patterns(popart.PatternsLevel.NoPatterns)
patterns.enableRuntimeAsserts(False)
session = popart.InferenceSession(
fnModel=builder.getModelProto(),
dataFlow=popart.DataFlow(1, {out: popart.AnchorReturnType("All")}),
userOptions=options,
patterns=patterns,
deviceInfo=deviceInfo)
with pytest.raises(popart.popart_exception) as e:
session.prepareDevice()
assert e.type == popart.session.OutOfMemoryException
print(e.value.getSummaryReport())
assert len(e.value.getProfilePath()) > 0
assert e.value.args[0].startswith(
"Out of memory: Cannot fit all variable data onto one or more tiles")
| 31.5
| 85
| 0.702742
|
32a4d6a8b771d002f1c1fd3a8e57bf56b207b891
| 3,375
|
py
|
Python
|
scripts/ntrip_ros.py
|
LORD-MicroStrain/ntrip_client
|
3c30cbd9be25c26963b84b370be09405f194fb9c
|
[
"MIT"
] | 6
|
2021-12-07T18:25:31.000Z
|
2022-01-25T09:39:34.000Z
|
scripts/ntrip_ros.py
|
LORD-MicroStrain/ntrip_client-release
|
2106bc9079c3ff940309661d91c109f93a61767f
|
[
"MIT"
] | 2
|
2022-01-14T09:29:05.000Z
|
2022-03-24T15:45:06.000Z
|
scripts/ntrip_ros.py
|
LORD-MicroStrain/ntrip_client-release
|
2106bc9079c3ff940309661d91c109f93a61767f
|
[
"MIT"
] | 1
|
2022-02-23T10:35:31.000Z
|
2022-02-23T10:35:31.000Z
|
#!/usr/bin/env python
import os
import sys
import json
import rospy
from std_msgs.msg import Header
from mavros_msgs.msg import RTCM
from nmea_msgs.msg import Sentence
from ntrip_client.ntrip_client import NTRIPClient
class NTRIPRos:
def __init__(self):
# Read a debug flag from the environment that should have been set by the launch file
try:
self._debug = json.loads(os.environ["NTRIP_CLIENT_DEBUG"].lower())
except:
self._debug = False
# Init the node and read some mandatory config
if self._debug:
rospy.init_node('ntrip_client', anonymous=True, log_level=rospy.DEBUG)
else:
rospy.init_node('ntrip_client', anonymous=True)
host = rospy.get_param('~host', '127.0.0.1')
port = rospy.get_param('~port', '2101')
mountpoint = rospy.get_param('~mountpoint', 'mount')
# Optionally get the ntrip version from the launch file
ntrip_version = rospy.get_param('~ntrip_version', None)
if ntrip_version == '':
ntrip_version = None
# If we were asked to authenticate, read the username and password
username = None
password = None
if rospy.get_param('~authenticate', False):
username = rospy.get_param('~username', None)
password = rospy.get_param('~password', None)
if username is None:
rospy.logerr(
'Requested to authenticate, but param "username" was not set')
sys.exit(1)
if password is None:
rospy.logerr(
'Requested to authenticate, but param "password" was not set')
sys.exit(1)
# Read an optional Frame ID from the config
self._rtcm_frame_id = rospy.get_param('~rtcm_frame_id', 'odom')
# Setup the RTCM publisher
self._rtcm_timer = None
self._rtcm_pub = rospy.Publisher('rtcm', RTCM, queue_size=10)
# Initialize the client
self._client = NTRIPClient(
host=host,
port=port,
mountpoint=mountpoint,
ntrip_version=ntrip_version,
username=username,
password=password,
logerr=rospy.logerr,
logwarn=rospy.logwarn,
loginfo=rospy.loginfo,
logdebug=rospy.logdebug
)
def run(self):
# Setup a shutdown hook
rospy.on_shutdown(self.stop)
# Connect the client
if not self._client.connect():
rospy.logerr('Unable to connect to NTRIP server')
return 1
# Setup our subscriber
self._nmea_sub = rospy.Subscriber('nmea', Sentence, self.subscribe_nmea, queue_size=10)
# Start the timer that will check for RTCM data
self._rtcm_timer = rospy.Timer(rospy.Duration(0.1), self.publish_rtcm)
# Spin until we are shutdown
rospy.spin()
return 0
def stop(self):
rospy.loginfo('Stopping RTCM publisher')
if self._rtcm_timer:
self._rtcm_timer.shutdown()
self._rtcm_timer.join()
rospy.loginfo('Disconnecting NTRIP client')
self._client.disconnect()
def subscribe_nmea(self, nmea):
# Just extract the NMEA from the message, and send it right to the server
self._client.send_nmea(nmea.sentence)
def publish_rtcm(self, event):
for raw_rtcm in self._client.recv_rtcm():
self._rtcm_pub.publish(RTCM(
header=Header(
stamp=rospy.Time.now(),
frame_id=self._rtcm_frame_id
),
data=raw_rtcm
))
if __name__ == '__main__':
ntrip_ros = NTRIPRos()
sys.exit(ntrip_ros.run())
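# --- Editor's note: illustrative invocation, not part of the original script.
# The private-parameter names mirror the rospy.get_param() calls above; host,
# mountpoint and credentials are placeholders, and NTRIP_CLIENT_DEBUG is normally
# exported by the launch file before the node starts.
#   rosrun ntrip_client ntrip_ros.py _host:=ntrip.example.com _port:=2101 \
#       _mountpoint:=MOUNT _authenticate:=true _username:=user _password:=pass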
| 28.601695
| 91
| 0.674074
|
cc8288f3ac70982060639f63208bccdcdd12aa32
| 7,703
|
gyp
|
Python
|
media/cast/cast.gyp
|
iplo/Chain
|
8bc8943d66285d5258fffc41bed7c840516c4422
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 231
|
2015-01-08T09:04:44.000Z
|
2021-12-30T03:03:10.000Z
|
media/cast/cast.gyp
|
JasonEric/chromium
|
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2017-02-14T21:55:58.000Z
|
2017-02-14T21:55:58.000Z
|
media/cast/cast.gyp
|
JasonEric/chromium
|
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 268
|
2015-01-21T05:53:28.000Z
|
2022-03-25T22:09:01.000Z
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'include_tests%': 1,
'chromium_code': 1,
},
'targets': [
{
'target_name': 'cast_config',
'type': 'static_library',
'include_dirs': [
'<(DEPTH)/',
],
'dependencies': [
'<(DEPTH)/base/base.gyp:base',
],
'sources': [
'cast_config.cc',
'cast_config.h',
'cast_defines.h',
'cast_environment.cc',
'cast_environment.h',
'logging/logging_defines.cc',
'logging/logging_defines.h',
'logging/logging_impl.cc',
'logging/logging_impl.h',
'logging/logging_raw.cc',
'logging/logging_raw.h',
'logging/logging_stats.cc',
'logging/logging_stats.h',
'logging/raw_event_subscriber.h',
'logging/simple_event_subscriber.cc',
'logging/simple_event_subscriber.h',
], # source
},
{
'target_name': 'cast_logging_proto_lib',
'type': 'static_library',
'sources': [
'logging/proto/proto_utils.cc',
'logging/proto/raw_events.proto',
],
'variables': {
'proto_in_dir': 'logging/proto',
'proto_out_dir': 'media/cast/logging/proto',
},
'includes': ['../../build/protoc.gypi'],
},
{
'target_name': 'sender_logging',
'type': 'static_library',
'include_dirs': [
'<(DEPTH)/',
],
'dependencies': [
'cast_config',
'cast_logging_proto_lib',
'<(DEPTH)/base/base.gyp:base',
],
'export_dependent_settings': [
'cast_logging_proto_lib',
],
'sources': [
'logging/encoding_event_subscriber.cc',
'logging/encoding_event_subscriber.h',
'logging/log_serializer.cc',
'logging/log_serializer.h',
], # source
},
], # targets,
'conditions': [
['include_tests==1', {
'targets': [
{
'target_name': 'cast_unittests',
'type': '<(gtest_target_type)',
'dependencies': [
'cast_config',
'cast_logging_proto_lib',
'cast_receiver.gyp:cast_receiver',
'cast_sender.gyp:cast_sender',
'sender_logging',
'test/utility/utility.gyp:cast_test_utility',
'transport/cast_transport.gyp:cast_transport',
'<(DEPTH)/base/base.gyp:run_all_unittests',
'<(DEPTH)/base/base.gyp:test_support_base',
'<(DEPTH)/net/net.gyp:net',
'<(DEPTH)/testing/gmock.gyp:gmock',
'<(DEPTH)/testing/gtest.gyp:gtest',
],
'include_dirs': [
'<(DEPTH)/',
'<(DEPTH)/third_party/',
'<(DEPTH)/third_party/webrtc/',
],
'sources': [
'audio_receiver/audio_decoder_unittest.cc',
'audio_receiver/audio_receiver_unittest.cc',
'audio_sender/audio_encoder_unittest.cc',
'audio_sender/audio_sender_unittest.cc',
'congestion_control/congestion_control_unittest.cc',
'framer/cast_message_builder_unittest.cc',
'framer/frame_buffer_unittest.cc',
'framer/framer_unittest.cc',
'logging/encoding_event_subscriber_unittest.cc',
'logging/logging_impl_unittest.cc',
'logging/logging_raw_unittest.cc',
'logging/simple_event_subscriber_unittest.cc',
'rtcp/mock_rtcp_receiver_feedback.cc',
'rtcp/mock_rtcp_receiver_feedback.h',
'rtcp/mock_rtcp_sender_feedback.cc',
'rtcp/mock_rtcp_sender_feedback.h',
'rtcp/rtcp_receiver_unittest.cc',
'rtcp/rtcp_sender_unittest.cc',
'rtcp/rtcp_unittest.cc',
'rtcp/receiver_rtcp_event_subscriber_unittest.cc',
'rtcp/sender_rtcp_event_subscriber_unittest.cc',
'rtp_receiver/rtp_receiver_defines.h',
'rtp_receiver/mock_rtp_payload_feedback.cc',
'rtp_receiver/mock_rtp_payload_feedback.h',
'rtp_receiver/receiver_stats_unittest.cc',
'rtp_receiver/rtp_parser/test/rtp_packet_builder.cc',
'rtp_receiver/rtp_parser/rtp_parser_unittest.cc',
'test/encode_decode_test.cc',
'test/end2end_unittest.cc',
'test/fake_gpu_video_accelerator_factories.cc',
'test/fake_gpu_video_accelerator_factories.h',
'test/fake_single_thread_task_runner.cc',
'test/fake_single_thread_task_runner.h',
'test/fake_video_encode_accelerator.cc',
'test/fake_video_encode_accelerator.h',
'transport/pacing/mock_paced_packet_sender.cc',
'transport/pacing/mock_paced_packet_sender.h',
'transport/pacing/paced_sender_unittest.cc',
'transport/rtp_sender/packet_storage/packet_storage_unittest.cc',
'transport/rtp_sender/rtp_packetizer/rtp_packetizer_unittest.cc',
'transport/rtp_sender/rtp_packetizer/test/rtp_header_parser.cc',
'transport/rtp_sender/rtp_packetizer/test/rtp_header_parser.h',
'transport/transport/udp_transport_unittest.cc',
'video_receiver/video_decoder_unittest.cc',
'video_receiver/video_receiver_unittest.cc',
'video_sender/external_video_encoder_unittest.cc',
'video_sender/video_encoder_impl_unittest.cc',
'video_sender/video_sender_unittest.cc',
], # source
},
{
'target_name': 'cast_sender_app',
'type': 'executable',
'include_dirs': [
'<(DEPTH)/',
],
'dependencies': [
'cast_config',
'sender_logging',
'<(DEPTH)/ui/gfx/gfx.gyp:gfx',
'<(DEPTH)/net/net.gyp:net_test_support',
'<(DEPTH)/media/cast/cast_sender.gyp:*',
'<(DEPTH)/media/media.gyp:media',
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(DEPTH)/third_party/opus/opus.gyp:opus',
'<(DEPTH)/media/cast/transport/cast_transport.gyp:cast_transport',
'<(DEPTH)/media/cast/test/utility/utility.gyp:cast_test_utility',
],
'sources': [
'<(DEPTH)/media/cast/test/sender.cc',
],
},
{
'target_name': 'cast_receiver_app',
'type': 'executable',
'include_dirs': [
'<(DEPTH)/',
],
'dependencies': [
'cast_config',
'<(DEPTH)/ui/gfx/gfx.gyp:gfx',
'<(DEPTH)/net/net.gyp:net_test_support',
'<(DEPTH)/media/cast/cast_receiver.gyp:*',
'<(DEPTH)/media/media.gyp:media',
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(DEPTH)/media/cast/transport/cast_transport.gyp:cast_transport',
'<(DEPTH)/media/cast/test/utility/utility.gyp:cast_test_utility',
'<(DEPTH)/third_party/libyuv/libyuv.gyp:libyuv',
],
'sources': [
'<(DEPTH)/media/cast/test/receiver.cc',
],
'conditions': [
['OS == "linux" and use_x11==1', {
'dependencies': [
'<(DEPTH)/build/linux/system.gyp:x11',
'<(DEPTH)/build/linux/system.gyp:xext',
],
'sources': [
'<(DEPTH)/media/cast/test/linux_output_window.cc',
'<(DEPTH)/media/cast/test/linux_output_window.h',
],
}],
],
},
], # targets
}], # include_tests
],
}
| 36.856459
| 78
| 0.565754
|
ae0a5a6ff059833d19e9c48dfd717206242a964a
| 1,281
|
py
|
Python
|
oldp/apps/courts/tests/test_views.py
|
docsuleman/oldp
|
8dcaa8e6e435794c872346b5014945ace885adb4
|
[
"MIT"
] | 66
|
2018-05-07T12:34:39.000Z
|
2022-02-23T20:14:24.000Z
|
oldp/apps/courts/tests/test_views.py
|
Justice-PLP-DHV/oldp
|
eadf235bb0925453d9a5b81963a0ce53afeb17fd
|
[
"MIT"
] | 68
|
2018-06-11T16:13:17.000Z
|
2022-02-10T08:03:26.000Z
|
oldp/apps/courts/tests/test_views.py
|
Justice-PLP-DHV/oldp
|
eadf235bb0925453d9a5b81963a0ce53afeb17fd
|
[
"MIT"
] | 15
|
2018-06-23T19:41:13.000Z
|
2021-08-18T08:21:49.000Z
|
from django.test import tag
from django.urls import reverse
from oldp.apps.lib.tests import ExtendedLiveServerTestCase
@tag('views')
class CourtsViewsTestCase(ExtendedLiveServerTestCase):
fixtures = [
'locations/countries.json',
'locations/states.json',
'locations/cities.json',
'courts/courts.json',
]
def test_index(self):
res = self.client.get(reverse('courts:index'))
self.assertContains(res, 'Amtsgericht Aalen')
self.assertContains(res, 'EuGH')
self.assertContains(res, 'Unknown state')
def test_index_filter(self):
res = self.client.get(reverse('courts:index') + '?state=1')
self.assertNotContains(res, 'Amtsgericht Aalen')
self.assertContains(res, 'EuGH')
self.assertContains(res, 'Unknown court')
self.assertStringOrder(res, 'EuGH', 'Unknown court')
# With reverse order
res = self.client.get(reverse('courts:index') + '?o=-name')
self.assertStringOrder(res, 'Unknown court', 'EuGH')
def test_detail(self):
res = self.client.get(reverse('courts:detail', args=('ag-aalen',)))
self.assertContains(res, 'Amtsgericht Aalen')
self.assertContains(res, 'AGAALEN')
# TODO test for cases
| 29.113636
| 75
| 0.649493
|
7a8833f6dc5fd9d77887f3cd959c279383421cf8
| 530
|
py
|
Python
|
packages/python/plotly/plotly/validators/layout/ternary/baxis/_exponentformat.py
|
mastermind88/plotly.py
|
efa70710df1af22958e1be080e105130042f1839
|
[
"MIT"
] | null | null | null |
packages/python/plotly/plotly/validators/layout/ternary/baxis/_exponentformat.py
|
mastermind88/plotly.py
|
efa70710df1af22958e1be080e105130042f1839
|
[
"MIT"
] | null | null | null |
packages/python/plotly/plotly/validators/layout/ternary/baxis/_exponentformat.py
|
mastermind88/plotly.py
|
efa70710df1af22958e1be080e105130042f1839
|
[
"MIT"
] | null | null | null |
import _plotly_utils.basevalidators
class ExponentformatValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(
self, plotly_name="exponentformat", parent_name="layout.ternary.baxis", **kwargs
):
super(ExponentformatValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "plot"),
values=kwargs.pop("values", ["none", "e", "E", "power", "SI", "B"]),
**kwargs,
)
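# --- Editor's note: minimal usage sketch, not part of the generated module. It
# assumes the validate_coerce() API of plotly's validator base classes.
if __name__ == "__main__":
    v = ExponentformatValidator()
    print(v.validate_coerce("SI"))  # an accepted value is returned unchanged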
| 35.333333
| 88
| 0.637736
|
95f5cac22f687f2045a8abc780aec1946e8685be
| 7,914
|
py
|
Python
|
api-ref/source/conf.py
|
mail2nsrajesh/networking-sfc
|
065ead63fbda222ffd12e8d7c9db197c39daac98
|
[
"Apache-2.0"
] | null | null | null |
api-ref/source/conf.py
|
mail2nsrajesh/networking-sfc
|
065ead63fbda222ffd12e8d7c9db197c39daac98
|
[
"Apache-2.0"
] | null | null | null |
api-ref/source/conf.py
|
mail2nsrajesh/networking-sfc
|
065ead63fbda222ffd12e8d7c9db197c39daac98
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# networking-sfc documentation build configuration file, created by
# sphinx-quickstart on Sat May 1 15:17:47 2010.
#
# This file is execfile()d with the current directory set to
# its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import subprocess
import sys
import warnings
import openstackdocstheme
html_theme = 'openstackdocs'
html_theme_path = [openstackdocstheme.get_html_theme_path()]
html_theme_options = {
"sidebar_mode": "toc",
}
extensions = [
'os_api_ref',
]
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../'))
sys.path.insert(0, os.path.abspath('../'))
sys.path.insert(0, os.path.abspath('./'))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Networking SFC API Reference'
copyright = u'2010-present, OpenStack Foundation'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# from neutron.version import version_info
# The full version, including alpha/beta/rc tags.
# release = version_info.release_string()
release = '1.0.0'
# The short X.Y version.
# version = version_info.version_string()
version = '1.0.0'
# Config logABug feature
giturl = (u'https://git.openstack.org/'
u'cgit/openstack/networking-sfc/tree/api-ref/source')
# source tree
# html_context allows us to pass arbitrary values into the html template
html_context = {"bug_tag": "api-ref",
"giturl": giturl,
"bug_project": "networking-sfc"}
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# The reST default role (used for this markup: `text`) to use
# for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for man page output ----------------------------------------------
# Grouping the document tree for man pages.
# List of tuples 'sourcefile', 'target', u'title', u'Authors name', 'manual'
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
git_cmd = ["git", "log", "--pretty=format:'%ad, commit %h'", "--date=local",
"-n1"]
try:
html_last_updated_fmt = subprocess.check_output(git_cmd).decode('utf-8')
except Exception:
warnings.warn('Cannot get last updated time from git repository. '
'Not setting "html_last_updated_fmt".')
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_use_modindex = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'networkingsfcdoc'
# -- Options for LaTeX output -------------------------------------------------
# The paper size ('letter' or 'a4').
# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'NetworkingSfc.tex',
u'OpenStack Networking SFC API Documentation',
u'OpenStack Foundation', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# Additional stuff for the LaTeX preamble.
# latex_preamble = ''
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_use_modindex = True
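# --- Editor's note (not part of the original config): this file is consumed by
# Sphinx; a typical build, run from the api-ref directory with the source layout
# assumed above, would be:
#   sphinx-build -b html source build/html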
| 32.838174
| 79
| 0.710892
|
0e7a1937bb02cca435727ab5769b08a4781661e7
| 2,489
|
py
|
Python
|
gpxpy/xmlutil.py
|
joelcarranza/gpxpy
|
ad76ae90afa2457b34c836cc1708f2994e381ab7
|
[
"MIT"
] | 2
|
2017-04-26T16:45:44.000Z
|
2017-08-08T16:29:43.000Z
|
gpxpy/xmlutil.py
|
joelcarranza/gpxpy
|
ad76ae90afa2457b34c836cc1708f2994e381ab7
|
[
"MIT"
] | null | null | null |
gpxpy/xmlutil.py
|
joelcarranza/gpxpy
|
ad76ae90afa2457b34c836cc1708f2994e381ab7
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
"""
xmlutil.py
Some simple utilities for working with xml
Created by Joel Carranza on 2011-06-12.
Copyright (c) 2011 __MyCompanyName__. All rights reserved.
"""
from xml.etree.cElementTree import Element
import pytz
# this is going to fail on 2.5???
import isodate
# Taken from: http://infix.se/2007/02/06/gentlemen-indent-your-xml
def indent(elem, level=0):
"Indents an ElementTree"
i = "\n" + level*" "
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
for e in elem:
indent(e, level+1)
if not e.tail or not e.tail.strip():
e.tail = i + " "
if not e.tail or not e.tail.strip():
e.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
class XAttr(object):
"""Really simple model for dealing with xml"""
def __init__(self, name,elname=None,type='s',attr=False):
self.name = name
self.elname = elname if elname else name
self.type = type
self.attr = attr
def tostr(self,value):
if self.type == 'd':
text = isodate.datetime_isoformat(value.astimezone(pytz.utc))
else:
text = str(value)
return text
def fromstr(self,text):
type = self.type
if type == 's':
value = text
elif type == 'd':
value = isodate.parse_datetime(text).astimezone(pytz.utc)
elif type == 'i':
value = int(text)
elif type == 'n':
value = float(text)
else:
      raise ValueError("Unknown format")  # was: Error, an undefined name
return value
def init(self,values,attrs):
for attr in attrs:
if attr.name in values:
setattr(self,attr.name,values[attr.name])
else:
setattr(self,attr.name,None)
def parse(ns,el,attrs):
"parse from XML element to construct model"
model = dict()
for attr in attrs:
value = None
text = None
if attr.attr:
text = el.attrib[attr.elname]
else:
child = el.find("{%s}%s" % (ns,attr.elname))
if child is not None:
text = child.text
if text:
model[attr.name] = attr.fromstr(text)
return model
def write(el,model,attrs):
"construct element representing model from attributes"
for attr in attrs:
value = getattr(model,attr.name)
if value is not None:
text = attr.tostr(value)
if attr.attr:
el.attrib[attr.elname] = text
else:
c = Element(attr.elname)
c.text = text
el.append(c)
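# --- Editor's note: minimal usage sketch, not part of the original module. It
# only uses XAttr, parse(), init() and write() defined above; the GPX namespace
# and waypoint element are illustrative.
if __name__ == "__main__":
  from xml.etree.cElementTree import fromstring, tostring
  NS = "http://www.topografix.com/GPX/1/1"
  attrs = [XAttr('name'), XAttr('ele', type='n')]
  el = fromstring('<wpt xmlns="%s"><name>Summit</name><ele>4810.9</ele></wpt>' % NS)
  model = parse(NS, el, attrs)   # {'name': 'Summit', 'ele': 4810.9}
  print(model)
  class Waypoint(object):
    pass
  wpt = Waypoint()
  init(wpt, model, attrs)        # copy the parsed values onto the object
  out = Element('wpt')
  write(out, wpt, attrs)
  print(tostring(out))           # b'<wpt><name>Summit</name><ele>4810.9</ele></wpt>'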
| 25.397959
| 67
| 0.609482
|
222e37a39a4c7178d18936ba6d96af72444b0000
| 5,060
|
py
|
Python
|
ee/api/app.py
|
thundermiracle/openreplay
|
b3c9a80a8c55b46c90070d63a4510d0ecdb19fbc
|
[
"MIT"
] | 1
|
2021-09-28T15:24:31.000Z
|
2021-09-28T15:24:31.000Z
|
ee/api/app.py
|
aayushgautam/openreplay
|
3298230c3a04fe537794bf396bdaf695c81301c6
|
[
"MIT"
] | 2
|
2022-02-15T00:07:39.000Z
|
2022-02-27T22:54:49.000Z
|
ee/api/app.py
|
aayushgautam/openreplay
|
3298230c3a04fe537794bf396bdaf695c81301c6
|
[
"MIT"
] | null | null | null |
import sentry_sdk
from chalice import Chalice, Response
from sentry_sdk import configure_scope
from chalicelib import _overrides
from chalicelib.blueprints import bp_authorizers
from chalicelib.blueprints import bp_core, bp_core_crons
from chalicelib.blueprints import bp_core_dynamic, bp_core_dynamic_crons
from chalicelib.blueprints.subs import bp_dashboard
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.helper import environ
from chalicelib.blueprints import bp_ee, bp_ee_crons, bp_saml
app = Chalice(app_name='parrot')
app.debug = not helper.is_production() or helper.is_local()
sentry_sdk.init(environ["sentryURL"])
# Monkey-patch print for DataDog hack
import sys
import traceback
old_tb = traceback.print_exception
old_f = sys.stdout
old_e = sys.stderr
OR_SESSION_TOKEN = None
class F:
def write(self, x):
if OR_SESSION_TOKEN is not None and x != '\n' and not helper.is_local():
old_f.write(f"[or_session_token={OR_SESSION_TOKEN}] {x}")
else:
old_f.write(x)
def flush(self):
pass
def tb_print_exception(etype, value, tb, limit=None, file=None, chain=True):
if OR_SESSION_TOKEN is not None and not helper.is_local():
value = type(value)(f"[or_session_token={OR_SESSION_TOKEN}] " + str(value))
old_tb(etype, value, tb, limit, file, chain)
if helper.is_production():
traceback.print_exception = tb_print_exception
sys.stdout = F()
sys.stderr = F()
# ---End Monkey-patch
_overrides.chalice_app(app)
@app.middleware('http')
def or_middleware(event, get_response):
from chalicelib.core import unlock
if not unlock.is_valid():
return Response(body={"errors": ["expired license"]}, status_code=403)
if "{projectid}" in event.path.lower():
from chalicelib.core import projects
if event.context["authorizer"].get("authorizer_identity") == "api_key" \
and not projects.is_authorized(
project_id=projects.get_internal_project_id(event.uri_params["projectId"]),
tenant_id=event.context["authorizer"]["tenantId"]) \
or event.context["authorizer"].get("authorizer_identity", "jwt") == "jwt" \
and not projects.is_authorized(project_id=event.uri_params["projectId"],
tenant_id=event.context["authorizer"]["tenantId"]):
print("unauthorized project")
pg_client.close()
return Response(body={"errors": ["unauthorized project"]}, status_code=401)
global OR_SESSION_TOKEN
OR_SESSION_TOKEN = app.current_request.headers.get('vnd.openreplay.com.sid',
app.current_request.headers.get('vnd.asayer.io.sid'))
if "authorizer" in event.context and event.context["authorizer"] is None:
print("Deleted user!!")
pg_client.close()
return Response(body={"errors": ["Deleted user"]}, status_code=403)
try:
if helper.TRACK_TIME:
import time
now = int(time.time() * 1000)
response = get_response(event)
if response.status_code == 200 and response.body is not None and response.body.get("errors") is not None:
if "not found" in response.body["errors"][0]:
response = Response(status_code=404, body=response.body)
else:
response = Response(status_code=400, body=response.body)
if response.status_code // 100 == 5 and helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local():
with configure_scope() as scope:
scope.set_tag('stage', environ["stage"])
scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN)
scope.set_extra("context", event.context)
sentry_sdk.capture_exception(Exception(response.body))
if helper.TRACK_TIME:
print(f"Execution time: {int(time.time() * 1000) - now} ms")
except Exception as e:
if helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local():
with configure_scope() as scope:
scope.set_tag('stage', environ["stage"])
scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN)
scope.set_extra("context", event.context)
sentry_sdk.capture_exception(e)
response = Response(body={"Code": "InternalServerError",
"Message": "An internal server error occurred [level=Fatal]."},
status_code=500)
pg_client.close()
return response
# Open source
app.register_blueprint(bp_authorizers.app)
app.register_blueprint(bp_core.app)
app.register_blueprint(bp_core_crons.app)
app.register_blueprint(bp_core_dynamic.app)
app.register_blueprint(bp_core_dynamic_crons.app)
app.register_blueprint(bp_dashboard.app)
# Enterprise
app.register_blueprint(bp_ee.app)
app.register_blueprint(bp_ee_crons.app)
app.register_blueprint(bp_saml.app)
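# --- Editor's note: illustrative local run, not part of the original file. Chalice
# apps are normally served from the project root with the Chalice CLI; the
# environment variables referenced above (sentryURL, stage, ...) must be set first.
#   chalice local --port 8000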
| 39.224806
| 129
| 0.674704
|
329b83135fb5f32db85a25cc6edebdf84609b601
| 59,476
|
py
|
Python
|
ts2pythonParser.py
|
jecki/ts2python
|
938267e48d8f7ff9620c36e7c9ab871cc2d6daab
|
[
"Apache-2.0"
] | 3
|
2021-10-16T14:27:40.000Z
|
2022-03-04T17:41:14.000Z
|
ts2pythonParser.py
|
jecki/ts2python
|
938267e48d8f7ff9620c36e7c9ab871cc2d6daab
|
[
"Apache-2.0"
] | 1
|
2022-02-03T01:19:57.000Z
|
2022-02-21T10:35:52.000Z
|
ts2pythonParser.py
|
jecki/ts2python
|
938267e48d8f7ff9620c36e7c9ab871cc2d6daab
|
[
"Apache-2.0"
] | 1
|
2021-10-16T07:07:34.000Z
|
2021-10-16T07:07:34.000Z
|
#!/usr/bin/env python3
"""ts2python.py - compiles typescript dataclasses to Python
TypedDicts <https://www.python.org/dev/peps/pep-0589/>
Copyright 2021 by Eckhart Arnold (arnold@badw.de)
Bavarian Academy of Sciences and Humanities (badw.de)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing
permissions and limitations under the License.
"""
#######################################################################
#
# SYMBOLS SECTION - Can be edited. Changes will be preserved.
#
#######################################################################
import datetime
import keyword
from functools import partial, lru_cache
import os
import sys
from typing import Tuple, List, Union, Any, Callable, Set, Dict, Sequence
try:
scriptpath = os.path.dirname(__file__)
except NameError:
scriptpath = ''
if scriptpath not in sys.path:
sys.path.append(scriptpath)
try:
import regex as re
except ImportError:
import re
from DHParser import start_logging, suspend_logging, resume_logging, is_filename, load_if_file, \
Grammar, Compiler, nil_preprocessor, PreprocessorToken, Whitespace, Drop, AnyChar, \
Lookbehind, Lookahead, Alternative, Pop, Text, Synonym, Counted, Interleave, INFINITE, \
Option, NegativeLookbehind, OneOrMore, RegExp, Retrieve, Series, Capture, TreeReduction, \
ZeroOrMore, Forward, NegativeLookahead, Required, CombinedParser, mixin_comment, \
compile_source, grammar_changed, last_value, matching_bracket, PreprocessorFunc, is_empty, \
remove_if, Node, TransformerCallable, TransformationDict, transformation_factory, traverse, \
remove_children_if, normalize_whitespace, is_anonymous, matches_re, \
reduce_single_child, replace_by_single_child, replace_or_reduce, remove_whitespace, \
replace_by_children, remove_empty, remove_tokens, flatten, all_of, any_of, \
merge_adjacent, collapse, collapse_children_if, transform_content, WHITESPACE_PTYPE, \
TOKEN_PTYPE, remove_children, remove_content, remove_brackets, change_tag_name, \
remove_anonymous_tokens, keep_children, is_one_of, not_one_of, has_content, apply_if, peek, \
remove_anonymous_empty, keep_nodes, traverse_locally, strip, lstrip, rstrip, \
transform_content, replace_content_with, forbid, assert_content, remove_infix_operator, \
add_error, error_on, recompile_grammar, left_associative, lean_left, set_config_value, \
get_config_value, node_maker, access_thread_locals, access_presets, PreprocessorResult, \
finalize_presets, ErrorCode, RX_NEVER_MATCH, set_tracer, resume_notices_on, \
trace_history, has_descendant, neg, has_ancestor, optional_last_value, insert, \
positions_of, replace_tag_names, add_attributes, delimit_children, merge_connected, \
has_attr, has_parent, ThreadLocalSingletonFactory, Error, canonical_error_strings, \
has_errors, WARNING, ERROR, FATAL, set_preset_value, get_preset_value, NEVER_MATCH_PATTERN, \
gen_find_include_func, preprocess_includes, make_preprocessor, chain_preprocessors, \
pick_from_context, json_dumps, RootNode, get_config_values, md5, StringView, as_list
#######################################################################
#
# PREPROCESSOR SECTION - Can be edited. Changes will be preserved.
#
#######################################################################
RE_INCLUDE = NEVER_MATCH_PATTERN
# To capture includes, replace the NEVER_MATCH_PATTERN
# by a pattern with group "name" here, e.g. r'\input{(?P<name>.*)}'
def ts2pythonTokenizer(original_text) -> Tuple[str, List[Error]]:
# Here, a function body can be filled in that adds preprocessor tokens
# to the source code and returns the modified source.
return original_text, []
def preprocessor_factory() -> PreprocessorFunc:
# below, the second parameter must always be the same as ts2pythonGrammar.COMMENT__!
find_next_include = gen_find_include_func(RE_INCLUDE, '(?:\\/\\/.*)|(?:\\/\\*(?:.|\\n)*?\\*\\/)')
include_prep = partial(preprocess_includes, find_next_include=find_next_include)
tokenizing_prep = make_preprocessor(ts2pythonTokenizer)
return chain_preprocessors(include_prep, tokenizing_prep)
get_preprocessor = ThreadLocalSingletonFactory(preprocessor_factory, ident=1)
#######################################################################
#
# PARSER SECTION - Don't edit! CHANGES WILL BE OVERWRITTEN!
#
#######################################################################
class ts2pythonGrammar(Grammar):
r"""Parser for a ts2python source file.
"""
arg_list = Forward()
declaration = Forward()
declarations_block = Forward()
document = Forward()
function = Forward()
generic_type = Forward()
literal = Forward()
type = Forward()
types = Forward()
source_hash__ = "e927d30fdd37fb910fbe23aaab9c02da"
disposable__ = re.compile('INT$|NEG$|FRAC$|DOT$|EXP$|EOF$|_array_ellipsis$|_top_level_assignment$|_top_level_literal$|_quoted_identifier$|_root$|_namespace$|_part$')
static_analysis_pending__ = [] # type: List[bool]
parser_initialization__ = ["upon instantiation"]
COMMENT__ = r'(?:\/\/.*)|(?:\/\*(?:.|\n)*?\*\/)'
comment_rx__ = re.compile(COMMENT__)
WHITESPACE__ = r'\s*'
WSP_RE__ = mixin_comment(whitespace=WHITESPACE__, comment=COMMENT__)
wsp__ = Whitespace(WSP_RE__)
dwsp__ = Drop(Whitespace(WSP_RE__))
EOF = Drop(NegativeLookahead(RegExp('.')))
EXP = Option(Series(Alternative(Text("E"), Text("e")), Option(Alternative(Text("+"), Text("-"))), RegExp('[0-9]+')))
DOT = Text(".")
FRAC = Option(Series(DOT, RegExp('[0-9]+')))
NEG = Text("-")
INT = Series(Option(NEG), Alternative(RegExp('[1-9][0-9]+'), RegExp('[0-9]')))
_part = RegExp('(?!\\d)\\w+')
identifier = Series(NegativeLookahead(Alternative(Text("true"), Text("false"))), _part, ZeroOrMore(Series(Text("."), _part)), dwsp__)
_quoted_identifier = Alternative(identifier, Series(Series(Drop(Text('"')), dwsp__), identifier, Series(Drop(Text('"')), dwsp__), mandatory=2), Series(Series(Drop(Text("\'")), dwsp__), identifier, Series(Drop(Text("\'")), dwsp__), mandatory=2))
variable = Series(identifier, ZeroOrMore(Series(Text("."), identifier)))
basic_type = Series(Alternative(Text("object"), Text("array"), Text("string"), Text("number"), Text("boolean"), Text("null"), Text("integer"), Text("uinteger"), Text("decimal"), Text("unknown"), Text("any"), Text("void")), dwsp__)
name = Alternative(identifier, Series(Series(Drop(Text('"')), dwsp__), identifier, Series(Drop(Text('"')), dwsp__)))
association = Series(name, Series(Drop(Text(":")), dwsp__), literal)
object = Series(Series(Drop(Text("{")), dwsp__), Option(Series(association, ZeroOrMore(Series(Series(Drop(Text(",")), dwsp__), association)))), Option(Series(Drop(Text(",")), dwsp__)), Series(Drop(Text("}")), dwsp__))
array = Series(Series(Drop(Text("[")), dwsp__), Option(Series(literal, ZeroOrMore(Series(Series(Drop(Text(",")), dwsp__), literal)))), Series(Drop(Text("]")), dwsp__))
string = Alternative(Series(RegExp('"[^"\\n]*"'), dwsp__), Series(RegExp("'[^'\\n]*'"), dwsp__))
boolean = Series(Alternative(Text("true"), Text("false")), dwsp__)
number = Series(INT, FRAC, EXP, dwsp__)
integer = Series(INT, NegativeLookahead(RegExp('[.Ee]')), dwsp__)
type_tuple = Series(Series(Drop(Text("[")), dwsp__), types, ZeroOrMore(Series(Series(Drop(Text(",")), dwsp__), types)), Series(Drop(Text("]")), dwsp__))
_top_level_literal = Drop(Synonym(literal))
_array_ellipsis = Drop(Series(literal, Drop(ZeroOrMore(Drop(Series(Series(Drop(Text(",")), dwsp__), literal))))))
assignment = Series(variable, Series(Drop(Text("=")), dwsp__), Alternative(literal, variable), Series(Drop(Text(";")), dwsp__))
_top_level_assignment = Drop(Synonym(assignment))
const = Series(Option(Series(Drop(Text("export")), dwsp__)), Series(Drop(Text("const")), dwsp__), declaration, Option(Series(Series(Drop(Text("=")), dwsp__), Alternative(literal, identifier))), Series(Drop(Text(";")), dwsp__), mandatory=2)
item = Series(_quoted_identifier, Option(Series(Series(Drop(Text("=")), dwsp__), literal)))
enum = Series(Option(Series(Drop(Text("export")), dwsp__)), Series(Drop(Text("enum")), dwsp__), identifier, Series(Drop(Text("{")), dwsp__), item, ZeroOrMore(Series(Series(Drop(Text(",")), dwsp__), item)), Option(Series(Drop(Text(",")), dwsp__)), Series(Drop(Text("}")), dwsp__), mandatory=3)
type_name = Synonym(identifier)
equals_type = Series(Series(Drop(Text("=")), dwsp__), Alternative(basic_type, type_name))
extends_type = Series(Series(Drop(Text("extends")), dwsp__), Alternative(basic_type, type_name))
func_type = Series(Series(Drop(Text("(")), dwsp__), Option(arg_list), Series(Drop(Text(")")), dwsp__), Series(Drop(Text("=>")), dwsp__), types)
readonly = Series(Text("readonly"), dwsp__)
index_signature = Series(Option(readonly), Series(Drop(Text("[")), dwsp__), identifier, Alternative(Series(Drop(Text(":")), dwsp__), Series(Series(Drop(Text("in")), dwsp__), Series(Drop(Text("keyof")), dwsp__))), type, Series(Drop(Text("]")), dwsp__))
map_signature = Series(index_signature, Series(Drop(Text(":")), dwsp__), types)
array_type = Alternative(basic_type, generic_type, type_name, Series(Series(Drop(Text("(")), dwsp__), types, Series(Drop(Text(")")), dwsp__)), type_tuple, declarations_block)
extends = Series(Series(Drop(Text("extends")), dwsp__), Alternative(generic_type, type_name), ZeroOrMore(Series(Series(Drop(Text(",")), dwsp__), Alternative(generic_type, type_name))))
array_types = Synonym(array_type)
array_of = Series(Option(Series(Drop(Text("readonly")), dwsp__)), array_types, Series(Drop(Text("[]")), dwsp__))
arg_tail = Series(Series(Drop(Text("...")), dwsp__), identifier, Option(Series(Series(Drop(Text(":")), dwsp__), array_of)))
parameter_type = Alternative(array_of, basic_type, generic_type, Series(type_name, Option(extends_type), Option(equals_type)), declarations_block, type_tuple)
parameter_types = Series(parameter_type, ZeroOrMore(Series(Series(Drop(Text("|")), dwsp__), parameter_type)))
type_parameters = Series(Series(Drop(Text("<")), dwsp__), parameter_types, ZeroOrMore(Series(Series(Drop(Text(",")), dwsp__), parameter_types)), Series(Drop(Text(">")), dwsp__), mandatory=1)
interface = Series(Option(Series(Drop(Text("export")), dwsp__)), Alternative(Series(Drop(Text("interface")), dwsp__), Series(Drop(Text("class")), dwsp__)), identifier, Option(type_parameters), Option(extends), declarations_block, mandatory=2)
type_alias = Series(Option(Series(Drop(Text("export")), dwsp__)), Series(Drop(Text("type")), dwsp__), identifier, Option(type_parameters), Series(Drop(Text("=")), dwsp__), types, Series(Drop(Text(";")), dwsp__), mandatory=2)
module = Series(Series(Drop(Text("declare")), dwsp__), Series(Drop(Text("module")), dwsp__), _quoted_identifier, Series(Drop(Text("{")), dwsp__), document, Series(Drop(Text("}")), dwsp__))
namespace = Series(Option(Series(Drop(Text("export")), dwsp__)), Series(Drop(Text("namespace")), dwsp__), identifier, Series(Drop(Text("{")), dwsp__), ZeroOrMore(Alternative(interface, type_alias, enum, const, Series(Option(Series(Drop(Text("export")), dwsp__)), declaration, Series(Drop(Text(";")), dwsp__)), Series(Option(Series(Drop(Text("export")), dwsp__)), function, Series(Drop(Text(";")), dwsp__)))), Series(Drop(Text("}")), dwsp__), mandatory=2)
intersection = Series(type, OneOrMore(Series(Series(Drop(Text("&")), dwsp__), type, mandatory=1)))
virtual_enum = Series(Option(Series(Drop(Text("export")), dwsp__)), Series(Drop(Text("namespace")), dwsp__), identifier, Series(Drop(Text("{")), dwsp__), ZeroOrMore(Alternative(interface, type_alias, enum, const, Series(declaration, Series(Drop(Text(";")), dwsp__)))), Series(Drop(Text("}")), dwsp__))
_namespace = Alternative(virtual_enum, namespace)
optional = Series(Text("?"), dwsp__)
static = Series(Text("static"), dwsp__)
mapped_type = Series(Series(Drop(Text("{")), dwsp__), map_signature, Option(Series(Drop(Text(";")), dwsp__)), Series(Drop(Text("}")), dwsp__))
qualifiers = Interleave(readonly, static, repetitions=[(0, 1), (0, 1)])
argument = Series(identifier, Option(optional), Option(Series(Series(Drop(Text(":")), dwsp__), types)))
literal.set(Alternative(integer, number, boolean, string, array, object))
generic_type.set(Series(type_name, type_parameters))
type.set(Alternative(array_of, basic_type, generic_type, type_name, Series(Series(Drop(Text("(")), dwsp__), types, Series(Drop(Text(")")), dwsp__)), mapped_type, declarations_block, type_tuple, literal, func_type))
types.set(Series(Alternative(intersection, type), ZeroOrMore(Series(Series(Drop(Text("|")), dwsp__), Alternative(intersection, type)))))
arg_list.set(Alternative(Series(argument, ZeroOrMore(Series(Series(Drop(Text(",")), dwsp__), argument)), Option(Series(Series(Drop(Text(",")), dwsp__), arg_tail))), arg_tail))
function.set(Series(Option(Series(Option(static), Option(Series(Drop(Text("function")), dwsp__)), identifier, Option(optional), Option(type_parameters))), Series(Drop(Text("(")), dwsp__), Option(arg_list), Series(Drop(Text(")")), dwsp__), Option(Series(Series(Drop(Text(":")), dwsp__), types)), mandatory=2))
declaration.set(Series(qualifiers, Option(Alternative(Series(Drop(Text("let")), dwsp__), Series(Drop(Text("var")), dwsp__))), identifier, Option(optional), NegativeLookahead(Text("(")), Option(Series(Series(Drop(Text(":")), dwsp__), types))))
declarations_block.set(Series(Series(Drop(Text("{")), dwsp__), Option(Series(Alternative(function, declaration), ZeroOrMore(Series(Option(Series(Drop(Text(";")), dwsp__)), Alternative(function, declaration))), Option(Series(Series(Drop(Text(";")), dwsp__), map_signature)), Option(Series(Drop(Text(";")), dwsp__)))), Series(Drop(Text("}")), dwsp__)))
document.set(Series(dwsp__, ZeroOrMore(Alternative(interface, type_alias, _namespace, enum, const, module, _top_level_assignment, _array_ellipsis, _top_level_literal, Series(Option(Series(Drop(Text("export")), dwsp__)), declaration, Series(Drop(Text(";")), dwsp__)), Series(Option(Series(Drop(Text("export")), dwsp__)), function, Series(Drop(Text(";")), dwsp__))))))
_root = Series(document, EOF)
resume_rules__ = {'interface': [re.compile(r'(?=export|$)')],
'type_alias': [re.compile(r'(?=export|$)')],
'enum': [re.compile(r'(?=export|$)')],
'const': [re.compile(r'(?=export|$)')],
'declaration': [re.compile(r'(?=export|$)')],
'_top_level_assignment': [re.compile(r'(?=export|$)')],
'_top_level_literal': [re.compile(r'(?=export|$)')],
'module': [re.compile(r'(?=export|$)')]}
root__ = TreeReduction(_root, CombinedParser.MERGE_TREETOPS)
_raw_grammar = ThreadLocalSingletonFactory(ts2pythonGrammar, ident=1)
def get_grammar() -> ts2pythonGrammar:
grammar = _raw_grammar()
if get_config_value('resume_notices'):
resume_notices_on(grammar)
elif get_config_value('history_tracking'):
set_tracer(grammar, trace_history)
try:
if not grammar.__class__.python_src__:
grammar.__class__.python_src__ = get_grammar.python_src__
except AttributeError:
pass
return grammar
def parse_ts2python(document, start_parser = "root_parser__", *, complete_match=True):
return get_grammar()(document, start_parser, complete_match)
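# --- Editor's note: illustrative use of the parser defined above, not part of the
# generated file; error handling is omitted.
#   syntax_tree = parse_ts2python("interface Position { line: number; character: number }")
#   print(syntax_tree.as_sxpr())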
#######################################################################
#
# AST SECTION - Can be edited. Changes will be preserved.
#
#######################################################################
ts2python_AST_transformation_table = {
# AST Transformations for the ts2python-grammar
# "<": flatten,
":Text": change_tag_name('TEXT')
# "*": replace_by_single_child
}
def ts2pythonTransformer() -> TransformerCallable:
"""Creates a transformation function that does not share state with other
threads or processes."""
return partial(traverse, transformation_table=ts2python_AST_transformation_table.copy())
get_transformer = ThreadLocalSingletonFactory(ts2pythonTransformer, ident=1)
def transform_ts2python(cst):
get_transformer()(cst)
#######################################################################
#
# COMPILER SECTION - Can be edited. Changes will be preserved.
#
#######################################################################
def source_hash(source_text: str) -> str:
try:
with open(__file__, 'r', encoding='utf-8') as f:
script_hash = md5(f.read())
except (FileNotFoundError, IOError):
script_hash = "source of ts2pythonParser.py not found!?"
return ' '.join([md5(source_text), script_hash])
GENERAL_IMPORTS = """
import sys
from enum import Enum, IntEnum
from functools import singledispatch, singledispatchmethod
if sys.version_info >= (3, 9, 0):
from typing import Union, Optional, Any, Generic, TypeVar, Callable, List, Tuple, Dict
# do not use list, tuple, dict, because contained types won't be forward ref'd
from collections.abc import Coroutine
else:
from typing import Union, List, Tuple, Optional, Dict, Any, Generic, TypeVar, Callable, Coroutine
"""
TYPEDDICT_IMPORTS = """
try:
from ts2python.typeddict_shim import TypedDict, GenericTypedDict, NotRequired, Literal
# Overwrite typing.TypedDict for Runtime-Validation
except ImportError:
# print("Module ts2python.json_validation not found. Only "
# "coarse-grained type-validation of TypedDicts possible")
try:
from typing import TypedDict, Literal
except ImportError:
try:
from ts2python.typing_extensions import TypedDict, Literal
except ImportError:
print(f'Please install the "typing_extensions" module via the shell '
f'command "# pip install typing_extensions" before running '
f'{__file__} with Python-versions <= 3.7!')
try:
from typing_extensions import NotRequired
except ImportError:
NotRequired = Optional
if sys.version_info >= (3, 7, 0): GenericMeta = type
else:
from typing import GenericMeta
class _GenericTypedDictMeta(GenericMeta):
def __new__(cls, name, bases, ns, total=True):
return type.__new__(_GenericTypedDictMeta, name, (dict,), ns)
__call__ = dict
GenericTypedDict = _GenericTypedDictMeta('TypedDict', (dict,), {})
GenericTypedDict.__module__ = __name__
"""
PEP655_IMPORTS = """
"""
def to_typename(varname: str) -> str:
# assert varname[-1:] != '_' or keyword.iskeyword(varname[:-1]), varname # and varname[0].islower()
return varname[0].upper() + varname[1:] + '_'
def to_varname(typename: str) -> str:
assert typename[0].isupper() or typename[-1:] == '_', typename
return typename[0].lower() + (typename[1:-1] if typename[-1:] == '_' else typename[1:])
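# Editor's note: illustrative round trip for the two helpers above:
#   to_typename('range')  -> 'Range_'   (trailing underscore avoids keyword clashes)
#   to_varname('Range_')  -> 'range'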
NOT_YET_IMPLEMENTED_WARNING = ErrorCode(310)
UNSUPPORTED_WARNING = ErrorCode(320)
TYPE_NAME_SUBSTITUTION = {
'object': 'Dict',
'array': 'List',
'string': 'str',
'number': 'float',
'decimal': 'float',
'integer': 'int',
'uinteger': 'int',
'boolean': 'bool',
'null': 'None',
'undefined': 'None',
'unknown': 'Any',
'any': 'Any',
'void': 'None',
'Thenable': 'Coroutine',
'Array': 'List',
'ReadonlyArray': 'List',
'Uint32Array': 'List[int]',
'Error': 'Exception',
'RegExp': 'str' }
class ts2pythonCompiler(Compiler):
"""Compiler for the abstract-syntax-tree of a ts2python source file.
"""
def reset(self):
super().reset()
bcn = get_config_value('ts2python.BaseClassName', 'TypedDict')
i = bcn.rfind('.')
if i >= 0:
self.additional_imports = f'\nfrom {bcn[:i]} import {bcn[i + 1:]}\n'
bcn = bcn[i + 1:]
else:
self.additional_imports = ''
self.base_class_name = bcn
self.class_decorator = get_config_value('ts2python.ClassDecorator', '').strip()
if self.class_decorator:
if self.class_decorator[0] != '@':
self.class_decorator = '@' + self.class_decorator
self.class_decorator += '\n'
self.use_enums = get_config_value('ts2python.UseEnum', True)
self.use_type_union = get_config_value('ts2python.UseTypeUnion', False)
self.use_literal_type = get_config_value('ts2python.UseLiteralType', True)
self.use_not_required = get_config_value('ts2python.UseNotRequired', False)
self.overloaded_type_names: Set[str] = set()
self.known_types: List[Set[str]] = [
{'Union', 'List', 'Tuple', 'Optional', 'Dict', 'Any',
'Generic', 'Coroutine', 'list'}]
self.local_classes: List[List[str]] = [[]]
self.base_classes: Dict[str, List[str]] = {}
self.typed_dicts: Set[str] = {'TypedDict'} # names of classes that are TypedDicts
# self.default_values: Dict = {}
# self.referred_objects: Dict = {}
self.basic_type_aliases: Set[str] = set()
self.obj_name: List[str] = ['TOPLEVEL_']
self.scope_type: List[str] = ['']
self.optional_keys: List[List[str]] = [[]]
self.func_name: str = '' # name of the current functions header or ''
self.strip_type_from_const = False
def compile(self, node) -> str:
result = super().compile(node)
if isinstance(result, str):
return result
raise TypeError(f"Compilation of {node.tag_name} yielded a result of "
f"type {str(type(result))} and not str as expected!")
def is_toplevel(self) -> bool:
return self.obj_name == ['TOPLEVEL_']
def is_known_type(self, typename: str) -> bool:
for type_set in self.known_types:
if typename in type_set:
return True
return False
# def qualified_obj_name(self, pos: int=0, varname: bool=False) -> str:
# obj_name = self.obj_name[1:] if len(self.obj_name) > 1 else self.obj_name
# if pos < 0: obj_name = obj_name[:pos]
# if varname: obj_name = obj_name[:-1] + [to_varname(obj_name[-1])]
# return '.'.join(obj_name)
def prepare(self, root: Node) -> None:
type_aliases = {nd['identifier'].content for nd in root.select_children('type_alias')}
namespaces = {nd['identifier'].content for nd in root.select_children('namespace')}
self.overloaded_type_names = type_aliases & namespaces
return None
def finalize(self, python_code: Any) -> Any:
chksum = f'source_hash__ = "{source_hash(self.tree.source)}"'
if self.tree.tag_name == 'document':
code_blocks = [
f'# Generated by ts2python on {datetime.datetime.now()}\n',
GENERAL_IMPORTS, TYPEDDICT_IMPORTS, self.additional_imports,
chksum, '\n##### BEGIN OF LSP SPECS\n'
]
if self.base_class_name == 'TypedDict':
code_blocks.append(PEP655_IMPORTS)
else:
code_blocks = []
code_blocks.append(python_code)
if self.tree.tag_name == 'document':
code_blocks.append('\n##### END OF LSP SPECS\n')
cooked = '\n\n'.join(code_blocks)
cooked = re.sub(' +(?=\n)', '', cooked)
return re.sub(r'\n\n\n+', '\n\n\n', cooked)
def on_EMPTY__(self, node) -> str:
return ''
def on_ZOMBIE__(self, node) -> str:
self.tree.new_error(node,
"Malformed syntax-tree! Possibly caused by a parsing error.")
return ""
# raise ValueError('Malformed syntax-tree!')
def on_document(self, node) -> str:
        if 'module' in node and len(as_list(node['module'])) > 1:  # '>' was applied to the isinstance() bool, which is always False
self.tree.new_error(
node, 'Transpiling more than a single ambient module '
'is not yet implemented! Only the first ambient module will '
'be transpiled for now.', NOT_YET_IMPLEMENTED_WARNING)
return self.compile(node['module'][0]['document'])
self.mark_overloaded_functions(node)
return '\n\n'.join(self.compile(child) for child in node.children
if child.tag_name != 'declaration')
def on_module(self, node) -> str:
name = self.compile(node['identifier'])
return self.compile(node['document'])
def render_class_header(self, name: str,
base_classes: str,
force_base_class: str = '') -> str:
optional_key_list = self.optional_keys.pop()
decorator = self.class_decorator
base_class_name = (force_base_class or self.base_class_name).strip()
if base_class_name == 'TypedDict':
total = not bool(optional_key_list) or self.use_not_required
if base_classes:
if base_classes.find('Generic[') >= 0:
td_name = 'GenericTypedDict'
else:
td_name = 'TypedDict'
if self.use_not_required:
return decorator + \
f"class {name}({base_classes}, {td_name}):\n"
else:
return decorator + f"class {name}({base_classes}, "\
f"{td_name}, total={total}):\n"
else:
if self.use_not_required:
return decorator + f"class {name}(TypedDict):\n"
else:
return decorator + \
f"class {name}(TypedDict, total={total}):\n"
else:
if base_classes:
if base_class_name:
return decorator + \
f"class {name}({base_classes}, {base_class_name}):\n"
else:
return decorator + f"class {name}({base_classes}):\n"
else:
if base_class_name:
return decorator + f"class {name}({base_class_name}):\n"
else:
return decorator + f"class {name}:\n"
def render_local_classes(self) -> str:
self.func_name = ''
classes = self.local_classes.pop()
return '\n'.join(lc for lc in classes) + '\n' if classes else ''
def process_type_parameters(self, node: Node) -> Tuple[str, str]:
try:
tp = self.compile(node['type_parameters'])
tp = tp.strip("'")
preface = f"{tp} = TypeVar('{tp}')\n"
self.known_types[-1].add(tp)
except KeyError:
tp = ''
preface = ''
return tp, preface
def on_interface(self, node) -> str:
name = self.compile(node['identifier'])
self.obj_name.append(name)
self.scope_type.append('interface')
self.local_classes.append([])
self.optional_keys.append([])
tp, preface = self.process_type_parameters(node)
preface += '\n'
preface += node.get_attr('preface', '')
self.known_types.append(set())
base_class_list = []
try:
base_class_list = self.bases(node['extends'])
base_classes = self.compile(node['extends'])
if tp:
base_classes += f", Generic[{tp}]"
except KeyError:
base_classes = f"Generic[{tp}]" if tp else ''
if any(bc not in self.typed_dicts for bc in base_class_list):
force_base_class = ' '
elif 'function' in node['declarations_block']:
force_base_class = ' ' # do not derive from TypeDict
else:
force_base_class = ''
self.typed_dicts.add(name)
decls = self.compile(node['declarations_block'])
interface = self.render_class_header(name, base_classes, force_base_class)
self.base_classes[name] = base_class_list
interface += (' ' + self.render_local_classes().replace('\n', '\n ')).rstrip(' ')
self.known_types.pop()
self.known_types[-1].add(name)
self.scope_type.pop()
self.obj_name.pop()
return preface + interface + ' ' + decls.replace('\n', '\n ')
# def on_type_parameter(self, node) -> str: # OBSOLETE, see on_type_parameters()
# return self.compile(node['identifier'])
@lru_cache(maxsize=4)
def bases(self, node) -> List[str]:
assert node.tag_name == 'extends'
bases = [self.compile(nd) for nd in node.children]
return [TYPE_NAME_SUBSTITUTION.get(bc, bc) for bc in bases]
def on_extends(self, node) -> str:
return ', '.join(self.bases(node))
def on_type_alias(self, node) -> str:
alias = self.compile(node['identifier'])
if all(typ[0].tag_name in ('basic_type', 'literal')
for typ in node.select('type')):
self.basic_type_aliases.add(alias)
self.obj_name.append(alias)
if alias not in self.overloaded_type_names:
self.known_types[-1].add(alias)
self.local_classes.append([])
self.optional_keys.append([])
types = self.compile(node['types'])
preface = self.render_local_classes()
code = preface + f"{alias} = {types}"
else:
code = ''
self.obj_name.pop()
return code
def mark_overloaded_functions(self, scope: Node):
is_interface = self.scope_type[-1] == 'interface'
first_use: Dict[str, Node] = dict()
try:
for func_decl in as_list(scope['function']):
name = func_decl['identifier'].content
if keyword.iskeyword(name):
name += '_'
if name in first_use:
first_use[name].attr['decorator'] = \
'@singledispatchmethod' if is_interface \
else '@singledispatch'
func_decl.attr['decorator'] = f'@{name}.register'
else:
first_use[name] = func_decl
except KeyError:
pass # no functions in declarations block
def on_declarations_block(self, node) -> str:
self.mark_overloaded_functions(node)
declarations = '\n'.join(self.compile(nd) for nd in node
if nd.tag_name in ('declaration', 'function'))
return declarations or "pass"
def on_declaration(self, node) -> str:
identifier = self.compile(node['identifier'])
self.obj_name.append(to_typename(identifier))
T = self.compile_type_expression(node, node['types']) \
if 'types' in node else 'Any'
typename = self.obj_name.pop()
if T[0:5] == 'class':
self.local_classes[-1].append(T)
T = typename # substitute typename for type
if 'optional' in node:
self.optional_keys[-1].append(identifier)
if self.use_not_required:
T = f"NotRequired[{T}]"
else:
if T.startswith('Union['):
if T.find('None') < 0:
T = T[:-1] + ', None]'
elif T.find('|') >= 0:
if T.find('None') < 0:
T += '|None'
else:
T = f"Optional[{T}]"
if self.is_toplevel() and bool(self.local_classes[-1]):
preface = self.render_local_classes()
self.local_classes.append([])
self.optional_keys.append([])
return preface + f"{identifier}: {T}"
return f"{identifier}: {T}"
def on_function(self, node) -> str:
is_constructor = False
if 'identifier' in node:
name = self.compile(node["identifier"])
self.func_name = name
if name == 'constructor' and self.scope_type[-1] == 'interface':
name = self.obj_name[-1] + 'Constructor'
is_constructor = True
else: # anonymous function
name = "__call__"
tp, preface = self.process_type_parameters(node)
try:
arguments = self.compile(node['arg_list'])
if self.scope_type[-1] == 'interface':
arguments = 'self, ' + arguments
except KeyError:
arguments = 'self' if self.scope_type[-1] == 'interface' else ''
try:
return_type = self.compile(node['types'])
except KeyError:
return_type = 'Any'
decorator = node.get_attr('decorator', '')
if decorator:
if decorator.endswith('.register'): name = '_'
decorator += '\n'
pyfunc = f"{preface}\n{decorator}def {name}({arguments}) -> {return_type}:\n pass"
if is_constructor:
interface = pick_from_context(self.context, 'interface', reverse=True)
assert interface
interface.attr['preface'] = ''.join([
interface.get_attr('preface', ''), pyfunc, '\n'])
return ''
else:
return pyfunc
def on_arg_list(self, node) -> str:
breadcrumb = '/'.join(nd.tag_name for nd in self.context)
if breadcrumb.rfind('func_type') > breadcrumb.rfind('function'):
arg_list = [self.compile(nd) for nd in node.children]
if any(arg[0:1] == '*' for arg in arg_list):
return '...'
return ', '.join(re.sub(r'^\w+\s*:\s*', '', arg) for arg in arg_list)
return ', '.join(self.compile(nd) for nd in node.children)
def on_arg_tail(self, node):
argname = self.compile(node["identifier"])
if 'array_of' in node:
type = self.compile(node['array_of'])[5:-1]
return f'*{argname}: {type}'
else:
return '*' + argname
def on_argument(self, node) -> str:
argname = self.compile(node["identifier"])
if 'types' in node:
# types = self.compile(node['types'])
self.obj_name.append(to_typename(argname))
types = self.compile_type_expression(node, node['types'])
self.obj_name.pop()
if 'optional' in node:
types = f'Optional[{types}] = None'
return f'{argname}: {types}'
else:
return f'{argname} = None' if 'optional' in node else argname
def on_optional(self, node):
assert False, "This method should never have been called!"
def on_index_signature(self, node) -> str:
return self.compile(node['type'])
def on_types(self, node) -> str:
union = []
i = 0
for nd in node.children:
obj_name_stub = self.obj_name[-1]
n = obj_name_stub.rfind('_')
ending = obj_name_stub[n + 1:]
if n >= 0 and (not ending or ending.isdecimal()):
obj_name_stub = obj_name_stub[:n]
fname = self.func_name[:1].upper() + self.func_name[1:]
self.obj_name[-1] = fname + obj_name_stub + '_' + str(i)
typ = self.compile_type_expression(node, nd)
if typ not in union:
union.append(typ)
i += 1
self.obj_name[-1] = obj_name_stub
for i in range(len(union)):
typ = union[i]
if typ[0:5] == 'class':
cname = re.match(r"class\s*(\w+)[\w(){},' =]*\s*:", typ).group(1)
self.local_classes[-1].append(typ)
union[i] = cname
if self.is_toplevel():
preface = self.render_local_classes()
self.local_classes.append([])
self.optional_keys.append([])
else:
preface = ''
if self.use_literal_type and \
any(nd[0].tag_name == 'literal' for nd in node.children):
assert all(nd[0].tag_name == 'literal' for nd in node.children)
return f"Literal[{', '.join(union)}]"
elif self.use_type_union or len(union) <= 1:
return preface + '|'.join(union)
else:
return preface + f"Union[{', '.join(union)}]"
def on_type(self, node) -> str:
assert len(node.children) == 1
typ = node[0]
if typ.tag_name == 'declarations_block':
self.local_classes.append([])
self.optional_keys.append([])
decls = self.compile(typ)
return ''.join([self.render_class_header(self.obj_name[-1], '') + " ",
self.render_local_classes().replace('\n', '\n '),
decls.replace('\n', '\n ')]) # maybe add one '\n'?
# return 'Dict'
elif typ.tag_name == 'literal':
literal_typ = typ[0].tag_name
if self.use_literal_type:
return f"Literal[{self.compile(typ)}]"
elif literal_typ == 'array':
return 'List'
elif literal_typ == 'object':
return 'Dict'
elif literal_typ in ('number', 'integer'):
literal = self.compile(typ)
try:
_ = int(literal)
return 'int'
except ValueError:
return 'str'
else:
assert literal_typ == 'string', literal_typ
literal = self.compile(typ)
return 'str'
else:
return self.compile(typ)
def on_type_tuple(self, node):
return 'Tuple[' + ', '.join(self.compile(nd) for nd in node) + ']'
def on_mapped_type(self, node) -> str:
return self.compile(node['map_signature'])
def on_map_signature(self, node) -> str:
return "Dict[%s, %s]" % (self.compile(node['index_signature']),
self.compile(node['types']))
def on_func_type(self, node) -> str:
if 'arg_list' in node:
arg_list = self.compile(node["arg_list"])
if arg_list.find('= None') >= 0 or arg_list.find('*') >= 0:
# See https://docs.python.org/3/library/typing.html#typing.Callable
args = '...'
else:
args = f'[{arg_list}]'
else:
args = '[]'
types = self.compile(node["types"])
return f'Callable[{args}, {types}]'
def on_intersection(self, node) -> str:
# ignore intersection
self.tree.new_error(node, 'Type intersections are not yet implemented',
NOT_YET_IMPLEMENTED_WARNING)
return "Any"
def on_virtual_enum(self, node) -> str:
name = self.compile(node['identifier'])
if self.is_known_type(name):
# self.tree.new_error(node,
# f'Name {name} has already been defined earlier!', WARNING)
return ''
self.known_types[-1].add(name)
save = self.strip_type_from_const
if all(child.tag_name == 'const' for child in node.children[1:]):
if all(nd['literal'][0].tag_name == 'integer'
for nd in node.select_children('const') if 'literal' in nd):
header = f'class {name}(IntEnum):'
else:
header = f'class {name}(Enum):'
self.strip_type_from_const = True
else:
header = ''
namespace = []
for child in node.children[1:]:
namespace.append(self.compile(child))
if not header:
header = self.render_class_header(name, '')[:-1] # leave out the trailing "\n"
namespace.insert(0, header)
self.strip_type_from_const = save
return '\n '.join(namespace)
def on_namespace(self, node) -> str:
# errmsg = "Transpilation of namespaces that contain more than just " \
# "constant definitions has not yet been implemented."
# self.tree.new_error(node, errmsg, NOT_YET_IMPLEMENTED_WARNING)
# return "# " + errmsg
name = self.compile(node['identifier'])
declarations = [f'class {name}:']
assert len(node.children) >= 2
declaration = self.compile(node[1])
declaration = declaration.lstrip('\n')
declarations.extend(declaration.split('\n'))
for nd in node[2:]:
declaration = self.compile(nd)
declarations.extend(declaration.split('\n'))
return '\n '.join(declarations)
def on_enum(self, node) -> str:
if self.use_enums:
if all(nd['literal'][0].tag_name == 'integer' for
nd in node.select_children('item') if 'literal' in nd):
base_class = '(IntEnum)'
else:
base_class = '(Enum)'
else:
base_class = ''
name = self.compile(node['identifier'])
self.known_types[-1].add(name)
enum = ['class ' + name + base_class + ':']
for item in node.select_children('item'):
enum.append(self.compile(item))
return '\n '.join(enum)
def on_item(self, node) -> str:
if len(node.children) == 1:
identifier = self.compile(node[0])
if self.use_enums:
return identifier + ' = enum.auto()'
else:
return identifier + ' = ' + repr(identifier)
else:
return self.compile(node['identifier']) + ' = ' + self.compile(node['literal'])
def on_const(self, node) -> str:
if 'literal' in node or 'identifier' in node:
if self.strip_type_from_const:
return self.compile(node['declaration']['identifier']) \
+ ' = ' + self.compile(node[-1])
else:
return self.compile(node['declaration']) + ' = ' + self.compile(node[-1])
else:
# const without assignment, e.g. "export const version: string;"
return self.compile(node['declaration'])
def on_assignment(self, node) -> str:
return self.compile(node['variable']) + ' = ' + self.compile(node[1])
def on_literal(self, node) -> str:
assert len(node.children) == 1
return self.compile(node[0])
def on_integer(self, node) -> str:
return node.content
def on_number(self, node) -> str:
return node.content
def on_boolean(self, node) -> str:
return {'true': 'True', 'false': 'False'}[node.content]
def on_string(self, node) -> str:
return node.content
def on_array(self, node) -> str:
return '[' + \
', '.join(self.compile(nd) for nd in node.children) + \
']'
def on_object(self, node) -> str:
return '{\n ' + \
',\n '.join(self.compile(nd) for nd in node.children) + \
'\n}'
def on_association(self, node) -> str:
return f'"{self.compile(node["name"])}": ' + self.compile(node['literal'])
def on_name(self, node) -> str:
return node.content
def on_basic_type(self, node) -> str:
return TYPE_NAME_SUBSTITUTION[node.content]
def on_generic_type(self, node) -> str:
base_type = self.compile(node['type_name'])
parameters = self.compile(node['type_parameters'])
if parameters == 'None':
return base_type
else:
return ''.join([base_type, '[', parameters, ']'])
def on_type_parameters(self, node) -> str:
type_parameters = [self.compile(nd) for nd in node.children]
return ', '.join(type_parameters)
def on_parameter_types(self, node) -> str:
return self.on_types(node)
def on_parameter_type(self, node) -> str:
if len(node.children) > 1:
node.result = (node[0],) # ignore extends_type and equals_type for now
return self.on_type(node)
def on_extends_type(self, node) -> str:
# TODO: generate TypeVar with restrictions
self.tree.new_error(node, "restrictied generics not yet implemented",
NOT_YET_IMPLEMENTED_WARNING)
return ""
def on_equals_type(self, node) -> str:
# TODO: generate TypeVar with restrictions
self.tree.new_error(node, "restrictied generics not yet implemented",
NOT_YET_IMPLEMENTED_WARNING)
return ""
def on_type_name(self, node) -> str:
name = self.compile(node['identifier'])
return TYPE_NAME_SUBSTITUTION.get(name, name)
def compile_type_expression(self, node, type_node):
unknown_types = set(tn.content for tn in node.select('type_name')
if not self.is_known_type(tn.content))
type_expression = self.compile(type_node)
for typ in unknown_types:
rx = re.compile(r"(?:(?<=[^\w'])|^)" + typ + r"(?:(?=[^\w'])|$)")
segments = type_expression.split("'")
for i in range(0, len(segments), 2):
segments[i] = rx.sub(f"'{typ}'", segments[i])
type_expression = "'".join(segments)
# type_expression = rx.sub(f"'{typ}'", type_expression)
if type_expression[0:1] == "'":
type_expression = ''.join(["'", type_expression.replace("'", ""), "'"])
return type_expression
def on_array_of(self, node) -> str:
assert len(node.children) == 1
element_type = self.compile_type_expression(node, node[0])
return 'List[' + element_type + ']'
def on_array_types(self, node) -> str:
return self.on_types(node)
def on_array_type(self, node) -> str:
return self.on_type(node)
def on_qualifiers(self, node):
assert False, "Qualifiers should be ignored and this method should never be called!"
def on_variable(self, node) -> str:
return node.content
def on_identifier(self, node) -> str:
identifier = node.content
if keyword.iskeyword(identifier):
identifier += '_'
return identifier
get_compiler = ThreadLocalSingletonFactory(ts2pythonCompiler, ident=1)
def compile_ts2python(ast):
return get_compiler()(ast)
#######################################################################
#
# END OF DHPARSER-SECTIONS
#
#######################################################################
RESULT_FILE_EXTENSION = ".sxpr" # Change this according to your needs!
def compile_src(source: str) -> Tuple[Any, List[Error]]:
"""Compiles ``source`` and returns (result, errors)."""
result_tuple = compile_source(source, get_preprocessor(), get_grammar(), get_transformer(),
get_compiler())
return result_tuple[:2] # drop the AST at the end of the result tuple
def serialize_result(result: Any) -> Union[str, bytes]:
"""Serialization of result. REWRITE THIS, IF YOUR COMPILATION RESULT
IS NOT A TREE OF NODES.
"""
if isinstance(result, Node):
return result.serialize(how='default' if RESULT_FILE_EXTENSION != '.xml' else 'xml')
elif isinstance(result, (str, StringView)):
return result
else:
return repr(result)
def process_file(source: str, result_filename: str = '') -> str:
"""Compiles the source and writes the serialized results back to disk,
unless any fatal errors have occurred. Error and Warning messages are
written to a file with the same name as `result_filename` with an
appended "_ERRORS.txt" or "_WARNINGS.txt" in place of the name's
extension. Returns the name of the error-messages file or an empty
    string, if no errors or warnings occurred.
"""
source_filename = source if is_filename(source) else ''
if os.path.isfile(result_filename):
with open(result_filename, 'r', encoding='utf-8') as f:
result = f.read()
if source_filename == source:
with open(source_filename, 'r', encoding='utf-8') as f:
source = f.read()
        m = re.search(r'source_hash__ *= *"([\w.!? ]*)"', result)
if m and m.groups()[-1] == source_hash(source):
return '' # no re-compilation necessary, because source hasn't changed
result, errors = compile_src(source)
if not has_errors(errors, FATAL):
if os.path.abspath(source_filename) != os.path.abspath(result_filename):
with open(result_filename, 'w', encoding='utf-8') as f:
f.write(serialize_result(result))
else:
errors.append(Error('Source and destination have the same name "%s"!'
% result_filename, 0, FATAL))
if errors:
err_ext = '_ERRORS.txt' if has_errors(errors, ERROR) else '_WARNINGS.txt'
err_filename = os.path.splitext(result_filename)[0] + err_ext
with open(err_filename, 'w') as f:
f.write('\n'.join(canonical_error_strings(errors)))
return err_filename
return ''
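# Hedged usage sketch (not part of the original script): how process_file()
# might be called for a single TypeScript file. "specs.ts" is a hypothetical
# file name; the result is written next to it with a ".py" extension, which
# mirrors what main() does for single files further below.
def _example_process_single_file(ts_path: str = 'specs.ts') -> str:
    """Compile one TypeScript file and return the error-file name ('' on success)."""
    result_name = os.path.splitext(ts_path)[0] + '.py'
    error_file = process_file(ts_path, result_name)
    if error_file:
        with open(error_file, 'r', encoding='utf-8') as f:
            print(f.read())
    return error_file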
def batch_process(file_names: List[str], out_dir: str,
*, submit_func: Callable = None,
log_func: Callable = None) -> List[str]:
"""Compiles all files listed in filenames and writes the results and/or
    error messages to the directory `out_dir`. Returns a list of error
messages files.
"""
error_list = []
def gen_dest_name(name):
return os.path.join(out_dir, os.path.splitext(os.path.basename(name))[0] \
+ RESULT_FILE_EXTENSION)
def run_batch(submit_func: Callable):
nonlocal error_list
err_futures = []
for name in file_names:
dest_name = gen_dest_name(name)
err_futures.append(submit_func(process_file, name, dest_name))
for file_name, err_future in zip(file_names, err_futures):
error_filename = err_future.result()
if log_func:
log_func('Compiling "%s"' % file_name)
if error_filename:
error_list.append(error_filename)
if submit_func is None:
import concurrent.futures
from DHParser.toolkit import instantiate_executor
with instantiate_executor(get_config_value('batch_processing_parallelization'),
concurrent.futures.ProcessPoolExecutor) as pool:
run_batch(pool.submit)
else:
run_batch(submit_func)
return error_list
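# Hedged usage sketch (not part of the original script): batch-compile every
# file found in a hypothetical "ts_specs" directory into "out", reusing the
# default process-pool parallelization of batch_process() above.
def _example_batch_compile(ts_dir: str = 'ts_specs', out_dir: str = 'out') -> List[str]:
    """Return the list of error-message files produced by the batch run."""
    file_names = [os.path.join(ts_dir, fn) for fn in os.listdir(ts_dir)
                  if os.path.isfile(os.path.join(ts_dir, fn))]
    if not os.path.exists(out_dir):
        os.mkdir(out_dir)
    return batch_process(file_names, out_dir, log_func=print)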
INSPECT_TEMPLATE = """<h2>{testname}</h2>
<h3>Test source</h3>
<div style="background-color: cornsilk;">
<code style="white-space: pre-wrap;">{test_source}
</code>
</div>
<h3>AST</h3>
<div style="background-color: antiquewhite;">
<code style="white-space: pre-wrap;">{ast_str}
</code>
</div>
<h3>Python</h3>
<div style="background-color: yellow;">
<code style="white-space: pre-wrap;">{code}
</code>
</div>
"""
def inspect(test_file_path: str):
assert test_file_path[-4:] == '.ini'
from DHParser.testing import unit_from_file
test_unit = unit_from_file(test_file_path, additional_stages={'py'})
grammar = get_grammar()
transformer = get_transformer()
compiler = get_compiler()
results = []
for parser in test_unit:
for testname, test_source in test_unit[parser].get('match', dict()).items():
ast = grammar(test_source, parser)
transformer(ast)
ast_str = ast.as_tree()
code = compiler(ast)
results.append(INSPECT_TEMPLATE.format(
testname=testname,
                test_source=test_source.replace('<', '&lt;').replace('>', '&gt;'),
                ast_str=ast_str.replace('<', '&lt;').replace('>', '&gt;'),
                code=code.replace('<', '&lt;').replace('>', '&gt;')))
test_file_name = os.path.basename(test_file_path)
results_str = '\n '.join(results)
html = f'''<!DOCTYPE html>\n<html>
<head><meta charset="utf-8"><title>{test_file_name}</title></head>
<body>
<h1>{test_file_name}</h1>
{results_str}\n</body>\n</html>'''
destdir = os.path.join(os.path.dirname(test_file_path), "REPORT")
if not os.path.exists(destdir): os.mkdir(destdir)
destpath = os.path.join(destdir, test_file_name[:-4] + '.html')
with open(destpath, 'w', encoding='utf-8') as f:
f.write(html)
import webbrowser
webbrowser.open('file://' + destpath if sys.platform == "darwin" else destpath)
def main():
# recompile grammar if needed
script_path = os.path.abspath(__file__)
script_name = os.path.basename(script_path)
if script_name.endswith('Parser.py'):
base_path = script_path[:-9]
else:
base_path = os.path.splitext(script_path)[0]
grammar_path = base_path + '.ebnf'
parser_update = False
def notify():
        nonlocal parser_update
parser_update = True
print('recompiling ' + grammar_path)
if os.path.exists(grammar_path) and os.path.isfile(grammar_path):
if not recompile_grammar(grammar_path, script_path, force=False, notify=notify):
error_file = base_path + '_ebnf_ERRORS.txt'
with open(error_file, encoding="utf-8") as f:
print(f.read())
sys.exit(1)
elif parser_update:
print(os.path.basename(__file__) + ' has changed. '
'Please run again in order to apply updated compiler')
sys.exit(0)
else:
print('Could not check whether grammar requires recompiling, '
'because grammar was not found at: ' + grammar_path)
from argparse import ArgumentParser
parser = ArgumentParser(description="Parses a ts2python-file and shows its syntax-tree.")
parser.add_argument('files', nargs='+')
parser.add_argument('-D', '--debug', action='store_const', const='debug',
help='Store debug information in LOGS subdirectory')
parser.add_argument('-o', '--out', nargs=1, default=['out'],
help='Output directory for batch processing')
parser.add_argument('-v', '--verbose', action='store_const', const='verbose',
help='Verbose output')
parser.add_argument('--singlethread', action='store_const', const='singlethread',
help='Run batch jobs in a single thread (recommended only for debugging)')
parser.add_argument('-c', '--compatibility', nargs=1, action='extend', type=str,
help='Minimal required python version (must be >= 3.6)')
parser.add_argument('-b', '--base', nargs=1, action='extend', type=str,
help='Base class name, e.g. TypedDict (default) or BaseModel (pydantic)')
parser.add_argument('-d', '--decorator', nargs=1, action='extend', type=str,
help="addes the given decorator ")
parser.add_argument('-p', '--peps', nargs='+', action='extend', type=str,
help='Assume Python-PEPs, e.g. 655 or ~655')
args = parser.parse_args()
file_names, out, log_dir = args.files, args.out[0], ''
workdir = file_names[0] if os.path.isdir(file_names[0]) else os.path.dirname(file_names[0])
from DHParser.configuration import read_local_config
read_local_config(os.path.join(workdir, 'ts2python/ts2pythonParser.ini'))
if args.debug or args.compatibility or args.base or args.decorator or args.peps:
access_presets()
if args.debug is not None:
log_dir = 'LOGS'
set_preset_value('history_tracking', True)
set_preset_value('resume_notices', True)
set_preset_value('log_syntax_trees', frozenset(['cst', 'ast'])) # don't use a set literal, here
if args.compatibility:
version_info = tuple(int(part) for part in args.compatibility[0].split('.'))
if version_info >= (3, 10):
set_preset_value('ts2python.UseTypeUnion', True, allow_new_key=True)
if args.base: set_preset_value('ts2python.BaseClassName', args.base[0].strip())
if args.decorator: set_preset_value('ts2python.ClassDecorator', args.decorator[0].strip())
if args.peps:
args_peps = [pep.strip() for pep in args.peps]
all_peps = {'435', '584', '604', '655', '~435', '~584', '~604', '~655'}
if not all(pep in all_peps for pep in args_peps):
print(f'Unsupported PEPs specified: {args_peps}\n'
'Allowed PEP arguments are:\n'
' 435 - use Enums (Python 3.4)\n'
' 604 - use type union (Python 3.10)\n'
' 584 - use Literal type (Python 3.8)\n'
' 655 - use NotRequired instead of Optional\n')
sys.exit(1)
for pep in args_peps:
kwargs= {'value': pep[0] != '~', 'allow_new_key': True}
if pep == '435': set_preset_value('ts2python.UseEnum', **kwargs)
if pep == '584': set_preset_value('ts2python.UseLiteralType', **kwargs)
            if pep == '604': set_preset_value('ts2python.UseTypeUnion', **kwargs)
if pep == '655': set_preset_value('ts2python.UseNotRequired', **kwargs)
finalize_presets()
_ = get_config_values('ts2python.*') # fill config value cache
start_logging(log_dir)
if args.singlethread:
set_config_value('batch_processing_parallelization', False)
def echo(message: str):
if args.verbose:
print(message)
batch_processing = True
if len(file_names) == 1:
if os.path.isdir(file_names[0]):
dir_name = file_names[0]
echo('Processing all files in directory: ' + dir_name)
file_names = [os.path.join(dir_name, fn) for fn in os.listdir(dir_name)
if os.path.isfile(os.path.join(dir_name, fn))]
elif not ('-o' in sys.argv or '--out' in sys.argv):
batch_processing = False
if batch_processing:
if not os.path.exists(out):
os.mkdir(out)
elif not os.path.isdir(out):
print('Output directory "%s" exists and is not a directory!' % out)
sys.exit(1)
error_files = batch_process(file_names, out, log_func=print if args.verbose else None)
if error_files:
category = "ERRORS" if any(f.endswith('_ERRORS.txt') for f in error_files) \
else "warnings"
print("There have been %s! Please check files:" % category)
print('\n'.join(error_files))
if category == "ERRORS":
sys.exit(1)
elif file_names[0][-4:] == '.ini':
inspect(file_names[0])
else:
assert file_names[0].lower().endswith('.ts')
error_file = process_file(file_names[0], file_names[0][:-3] + '.py')
if error_file:
with open(error_file, 'r', encoding='utf-8') as f:
print(f.read())
if __name__ == "__main__":
main()
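# Hedged CLI sketch (hypothetical file name, not from the original docs):
#     python ts2pythonParser.py lsp_specs.ts --base TypedDict --peps 655 ~604
# would transpile "lsp_specs.ts" to "lsp_specs.py" next to it, using
# NotRequired fields (PEP 655) while avoiding the Python 3.10 "X | Y"
# union syntax (PEP 604).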
| 46.034056
| 458
| 0.602613
|
232d604c0918f480ded5b757ba50b2a407cf8919
| 17,728
|
py
|
Python
|
knowledgerepr/fieldnetwork.py
|
svdwoude/aurum-datadiscovery
|
2e569f6886065819a63e0b55561b94bccd4a078a
|
[
"MIT"
] | null | null | null |
knowledgerepr/fieldnetwork.py
|
svdwoude/aurum-datadiscovery
|
2e569f6886065819a63e0b55561b94bccd4a078a
|
[
"MIT"
] | null | null | null |
knowledgerepr/fieldnetwork.py
|
svdwoude/aurum-datadiscovery
|
2e569f6886065819a63e0b55561b94bccd4a078a
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
import operator
import networkx as nx
import os
from collections import defaultdict
from api.apiutils import DRS
from api.apiutils import Operation
from api.apiutils import OP
from api.apiutils import Hit
from api.apiutils import Relation
from api.apiutils import compute_field_id
from api.annotation import MRS
def build_hit(sn, fn):
nid = compute_field_id(sn, fn)
return Hit(nid, sn, fn, -1)
class FieldNetwork:
# The core graph
__G = nx.MultiGraph()
__id_names = dict()
__source_ids = defaultdict(list)
def __init__(self, graph=None, id_names=None, source_ids=None):
if graph is None:
self.__G = nx.MultiGraph()
else:
self.__G = graph
self.__id_names = id_names
self.__source_ids = source_ids
def graph_order(self):
return len(self.__id_names.keys())
def get_number_tables(self):
return len(self.__source_ids.keys())
def iterate_ids(self):
for k, _ in self.__id_names.items():
yield k
def iterate_ids_text(self):
for k, v in self.__id_names.items():
(db_name, source_name, field_name, data_type) = v
if data_type == 'T':
yield k
def iterate_values(self) -> (str, str, str, str):
for _, v in self.__id_names.items():
yield v
def get_fields_of_source(self, source) -> [int]:
return self.__source_ids[source]
def get_data_type_of(self, nid):
_, _, _, data_type = self.__id_names[nid]
return data_type
def get_info_for(self, nids):
info = []
for nid in nids:
db_name, source_name, field_name, data_type = self.__id_names[nid]
info.append((nid, db_name, source_name, field_name))
return info
def get_hits_from_info(self, info):
hits = [Hit(nid, db_name, s_name, f_name, 0) for nid, db_name, s_name, f_name in info]
return hits
def get_hits_from_table(self, table) -> [Hit]:
nids = self.get_fields_of_source(table)
info = self.get_info_for(nids)
hits = [Hit(nid, db_name, s_name, f_name, 0) for nid, db_name, s_name, f_name in info]
return hits
def get_cardinality_of(self, node_id):
c = self.__G.node[node_id]
card = c['cardinality']
if card is None:
return 0 # no cardinality is like card 0
return card
def _get_underlying_repr_graph(self):
return self.__G
def _get_underlying_repr_id_to_field_info(self):
return self.__id_names
def _get_underlying_repr_table_to_ids(self):
return self.__source_ids
def _visualize_graph(self):
nx.draw(self.__G)
plt.show()
def init_meta_schema(self, fields: (int, str, str, str, int, int, str)):
"""
        Creates a dictionary of id -> (dbname, sourcename, fieldname, datatype)
        and one of:
        sourcename -> [ids]
Then it also initializes the graph with all the nodes, e.g., ids and the cardinality
for these, if any.
:param fields:
:return:
"""
print("Building schema relation...")
for (nid, db_name, sn_name, fn_name, total_values, unique_values, data_type) in fields:
self.__id_names[nid] = (db_name, sn_name, fn_name, data_type)
self.__source_ids[sn_name].append(nid)
cardinality_ratio = None
if float(total_values) > 0:
cardinality_ratio = float(unique_values) / float(total_values)
self.add_field(nid, cardinality_ratio)
print("Building schema relation...OK")
def add_field(self, nid, cardinality=None):
"""
Creates a graph node for this field and adds it to the graph
        :param nid: the id of the node (a hash of dbname, sourcename and fieldname)
:param cardinality: the cardinality of the values of the node, if any
:return: the newly added field node
"""
self.__G.add_node(nid, cardinality=cardinality)
return nid
def add_fields(self, list_of_fields):
"""
Creates a list of graph nodes from the list of fields and adds them to the graph
:param list_of_fields: list of (source_name, field_name) tuples
:return: the newly added list of field nodes
"""
nodes = []
for nid, sn, fn in list_of_fields:
n = Hit(nid, sn, fn, -1)
nodes.append(n)
self.__G.add_nodes_from(nodes)
return nodes
def add_relation(self, node_src, node_target, relation, score):
"""
Adds or updates the score of relation for the edge between node_src and node_target
:param node_src: the source node
:param node_target: the target node
:param relation: the type of relation (edge)
:param score: the numerical value of the score
:return:
"""
score = {'score': score}
self.__G.add_edge(node_src, node_target, relation, score)
def fields_degree(self, topk):
degree = nx.degree(self.__G)
sorted_degree = sorted(degree.items(), key=operator.itemgetter(1))
sorted_degree.reverse()
topk_nodes = sorted_degree[:topk]
return topk_nodes
def enumerate_relation(self, relation):
seen_pairs = set()
for nid in self.iterate_ids():
db_name, source_name, field_name, data_type = self.__id_names[nid]
hit = Hit(nid, db_name, source_name, field_name, 0)
neighbors = self.neighbors_id(hit, relation)
for n2 in neighbors:
if not (n2.nid, nid) in seen_pairs:
seen_pairs.add((nid, n2.nid))
string = str(hit) + " - " + str(n2)
yield string
def print_relations(self, relation):
total_relationships = 0
if relation == Relation.CONTENT_SIM:
for x in self.enumerate_relation(Relation.CONTENT_SIM):
total_relationships += 1
print(x)
if relation == Relation.SCHEMA_SIM:
for x in self.enumerate_relation(Relation.SCHEMA):
total_relationships += 1
print(x)
if relation == Relation.PKFK:
for x in self.enumerate_relation(Relation.PKFK):
total_relationships += 1
print(x)
print("Total " + str(relation) + " relations: " + str(total_relationships))
def get_op_from_relation(self, relation):
if relation == Relation.CONTENT_SIM:
return OP.CONTENT_SIM
if relation == Relation.ENTITY_SIM:
return OP.ENTITY_SIM
if relation == Relation.PKFK:
return OP.PKFK
if relation == Relation.SCHEMA:
return OP.TABLE
if relation == Relation.SCHEMA_SIM:
return OP.SCHEMA_SIM
if relation == Relation.MEANS_SAME:
return OP.MEANS_SAME
if relation == Relation.MEANS_DIFF:
return OP.MEANS_DIFF
if relation == Relation.SUBCLASS:
return OP.SUBCLASS
if relation == Relation.SUPERCLASS:
return OP.SUPERCLASS
if relation == Relation.MEMBER:
return OP.MEMBER
if relation == Relation.CONTAINER:
return OP.CONTAINER
def neighbors_id(self, hit: Hit, relation: Relation) -> DRS:
if isinstance(hit, Hit):
nid = str(hit.nid)
if isinstance(hit, str):
nid = hit
nid = str(nid)
data = []
neighbours = self.__G[nid]
for k, v in neighbours.items():
if relation in v:
score = v[relation]['score']
(db_name, source_name, field_name, data_type) = self.__id_names[k]
data.append(Hit(k, db_name, source_name, field_name, score))
op = self.get_op_from_relation(relation)
o_drs = DRS(data, Operation(op, params=[hit]))
return o_drs
def md_neighbors_id(self, hit: Hit, md_neighbors: MRS, relation: Relation) -> DRS:
if isinstance(hit, Hit):
nid = str(hit.nid)
if isinstance(hit, str):
nid = hit
nid = str(nid)
data = []
score = 1.0 # TODO: return more meaningful score results
for hit in md_neighbors:
k = hit.target if hit.target != nid else hit.source
(db_name, source_name, field_name, data_type) = self.__id_names[k]
data.append(Hit(k, db_name, source_name, field_name, score))
op = self.get_op_from_relation(relation)
o_drs = DRS(data, Operation(op, params=[hit]))
return o_drs
def find_path_hit(self, source, target, relation, max_hops=5):
def assemble_field_path_provenance(o_drs, path, relation):
src = path[0]
tgt = path[-1]
origin = DRS([src], Operation(OP.ORIGIN))
o_drs.absorb_provenance(origin)
prev_c = src
for c in path[1:-1]:
nxt = DRS([c], Operation(OP.PKFK, params=[prev_c]))
o_drs.absorb_provenance(nxt)
prev_c = c
sink = DRS([tgt], Operation(OP.PKFK, params=[prev_c]))
o_drs = o_drs.absorb(sink)
return o_drs
def deep_explore(candidates, target_group, already_visited, path, max_hops):
"""
Recursively depth-first explore the graph, checking if candidates are in target_group
Returns (boolean, [])
"""
local_max_hops = max_hops
if local_max_hops == 0:
return False
# first check membership
for c in candidates:
if c in target_group:
path.insert(0, c)
return True
# if not, then we explore these individually
for c in candidates:
if c in already_visited:
continue # next candidate
else:
already_visited.append(c) # add candidate to set of already visited
next_level_candidates = [x for x in self.neighbors_id(c, relation)] # get next set of candidates
if len(next_level_candidates) == 0:
continue
next_max_hops = local_max_hops - 1 # reduce one level depth and go ahead
success = deep_explore(next_level_candidates, target_group, already_visited, path, next_max_hops)
if success:
path.insert(0, c)
return True
return False # if all nodes were already visited
# maximum number of hops
max_hops = 5
o_drs = DRS([], Operation(OP.NONE)) # Carrier of provenance
# TODO: same src == trg, etc
path = []
success = deep_explore([source], [target], [], path, max_hops)
if success:
o_drs = assemble_field_path_provenance(o_drs, path, relation)
return o_drs
else:
return DRS([], Operation(OP.NONE))
def find_path_table(self, source: str, target: str, relation, api, max_hops=3):
def assemble_table_path_provenance(o_drs, paths, relation):
for path in paths:
src, src_sibling = path[0]
assert (src_sibling is None) # sibling of source should be None, as source is an origin
tgt, tgt_sibling = path[-1]
origin = DRS([src], Operation(OP.ORIGIN))
o_drs.absorb_provenance(origin)
prev_c = src
for c, sibling in path[1:-1]:
nxt = DRS([sibling], Operation(OP.PKFK, params=[prev_c]))
o_drs.absorb_provenance(nxt)
if c.nid != sibling.nid: # avoid loop on head nodes of the graph
linker = DRS([c], Operation(OP.TABLE, params=[sibling]))
o_drs.absorb_provenance(linker)
prev_c = c
sink = DRS([tgt_sibling], Operation(OP.PKFK, params=[prev_c]))
                # The join path at the target has a None sibling
if tgt is not None and tgt_sibling is not None and tgt.nid != tgt_sibling.nid:
o_drs = o_drs.absorb_provenance(sink)
linker = DRS([tgt], Operation(OP.TABLE, params=[tgt_sibling]))
o_drs.absorb(linker)
else:
o_drs = o_drs.absorb(sink)
return o_drs
def check_membership(c, paths):
for p in paths:
for (s, sibling) in p:
if c.source_name == s.source_name:
return True
return False
def append_to_paths(paths, c):
new_paths = []
for p in paths:
new_path = []
new_path.extend(p)
new_path.append(c)
new_paths.append(new_path)
return new_paths
def get_table_neighbors(hit, relation, paths):
results = []
direct_neighbors = self.neighbors_id(hit, relation)
            # Rewriting results - filtering out results that are in the same table as the input, and rewriting provenance accordingly
direct_neighbors_list = [neigh for neigh in direct_neighbors if neigh.source_name != hit.source_name]
op = self.get_op_from_relation(relation)
direct_neighbors = DRS(direct_neighbors_list, Operation(op, params=[hit]))
# FIXME: filter out already seen nodes here
for n in direct_neighbors:
if not check_membership(n, paths):
t_neighbors = api.drs_from_table_hit(n) # Brought old API
# t_neighbors = api.make_drs(n) # XXX: this won't take all table neighbors, only the input one
results.extend([(x, n) for x in t_neighbors])
return results # note how we include hit as sibling of x here
def dfs_explore(sources, targets, max_hops, paths):
# Check if sources have reached targets
for (s, sibling) in sources:
if s in targets:
# Append successful paths to found_paths
# T1.A join T2.B, and T2.C may join with other tables T3.D
# get_table_neighbors returns next_candidates (s, sibling) (C,B)
# in case T2 is the target add to the path (sibling, sibling)
# Otherwise (C,B)
if s.source_name == targets[0].source_name:
next_paths = append_to_paths(paths, (sibling, sibling))
else:
next_paths = append_to_paths(paths, (s, sibling))
found_paths.extend(next_paths)
return True
# Check if no more hops are allowed:
if max_hops == 0:
return False # not found path
# Get next set of candidates and keep exploration
for (s, sibling) in sources:
next_candidates = get_table_neighbors(s, relation, paths) # updated paths to test membership
# recursive on new candidates, one fewer hop and updated paths
if len(next_candidates) == 0:
continue
next_paths = append_to_paths(paths, (s, sibling))
dfs_explore(next_candidates, targets, max_hops - 1, next_paths)
o_drs = DRS([], Operation(OP.NONE)) # Carrier of provenance
# TODO: same src == trg, etc
# src_drs = api.drs_from_table(source)
# trg_drs = api.drs_from_table(target)
src_drs = api.make_drs(source)
trg_drs = api.make_drs(target)
found_paths = []
candidates = [(x, None) for x in src_drs] # tuple carrying candidate and same-table attribute
paths = [[]] # to carry partial paths
dfs_explore(candidates, [x for x in trg_drs], max_hops, paths)
# for p in found_paths:
# print(p)
o_drs = assemble_table_path_provenance(o_drs, found_paths, relation)
return o_drs
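# Hedged usage sketch (not part of the original module): build a tiny two-column
# network and query it, illustrating init_meta_schema(), add_relation() and
# neighbors_id(). Node ids are kept as strings because neighbors_id() looks
# nodes up via str(nid); the explicit dict/defaultdict arguments and the
# 5-field Hit constructor follow how they are used elsewhere in this module,
# and the edge API assumes the networkx 1.x behaviour this file targets.
def _example_tiny_network() -> DRS:
    network = FieldNetwork(nx.MultiGraph(), dict(), defaultdict(list))
    fields = [
        # (nid, db_name, source_name, field_name, total_values, unique_values, data_type)
        ('1', 'db', 'employees', 'name', 100, 90, 'T'),
        ('2', 'db', 'salaries', 'employee_name', 100, 85, 'T'),
    ]
    network.init_meta_schema(fields)
    network.add_relation('1', '2', Relation.CONTENT_SIM, 0.8)
    hit = Hit('1', 'db', 'employees', 'name', 0)
    return network.neighbors_id(hit, Relation.CONTENT_SIM)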
def serialize_network_to_csv(network, path):
nodes = set()
G = network._get_underlying_repr_graph()
with open(path + "edges.csv", 'w') as f:
for src, tgt in G.edges_iter(data=False):
s = str(src) + "," + str(tgt) + "," + "1\n"
nodes.add(src)
nodes.add(tgt)
f.write(s)
with open(path + "nodes.csv", 'w') as f:
for n in nodes:
s = str(n) + "," + "node\n"
f.write(s)
def serialize_network(network, path):
"""
Serialize the meta schema index
:param network:
:param path:
:return:
"""
G = network._get_underlying_repr_graph()
id_to_field_info = network._get_underlying_repr_id_to_field_info()
table_to_ids = network._get_underlying_repr_table_to_ids()
# Make sure we create directory if this does not exist
path = path + '/' # force separator
os.makedirs(os.path.dirname(path), exist_ok=True)
nx.write_gpickle(G, path + "graph.pickle")
nx.write_gpickle(id_to_field_info, path + "id_info.pickle")
nx.write_gpickle(table_to_ids, path + "table_ids.pickle")
def deserialize_network(path):
G = nx.read_gpickle(path + "graph.pickle")
id_to_info = nx.read_gpickle(path + "id_info.pickle")
table_to_ids = nx.read_gpickle(path + "table_ids.pickle")
network = FieldNetwork(G, id_to_info, table_to_ids)
return network
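# Hedged usage sketch (not part of the original module): serialize a network to
# a hypothetical directory and read it back, as a round-trip consistency check.
# Note that deserialize_network() expects the trailing path separator that
# serialize_network() appends internally.
def _example_serialize_roundtrip(network: FieldNetwork,
                                 path: str = '/tmp/aurum_model') -> FieldNetwork:
    serialize_network(network, path)
    return deserialize_network(path + '/')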
if __name__ == "__main__":
print("Field Network")
| 37.479915
| 115
| 0.583822
|
2c7f5e18a11989d09c2561974fbf09c8c4de43d7
| 2,730
|
py
|
Python
|
tests/test_rpc/test_torrent_methods.py
|
SickChill/rtorrent-python
|
5edd34814d86a473274c4846b311d69ab1641d90
|
[
"MIT"
] | 3
|
2021-01-04T00:34:04.000Z
|
2021-03-31T18:17:08.000Z
|
tests/test_rpc/test_torrent_methods.py
|
SickChill/new-rtorrent-python
|
5edd34814d86a473274c4846b311d69ab1641d90
|
[
"MIT"
] | null | null | null |
tests/test_rpc/test_torrent_methods.py
|
SickChill/new-rtorrent-python
|
5edd34814d86a473274c4846b311d69ab1641d90
|
[
"MIT"
] | 1
|
2021-03-31T18:23:35.000Z
|
2021-03-31T18:23:35.000Z
|
import unittest
from .torrent import Torrent
INFO_HASH = '1D226C20D67F8F2DDEE4FD99A880974B3F2B6F1E'
t = Torrent(None, INFO_HASH)
class TestGetInfoHash(unittest.TestCase):
def setUp(self):
self.call = t.rpc_call('get_info_hash')
def test_pre_processing(self):
self.call.do_pre_processing()
self.assertEqual(self.call.get_args(), [INFO_HASH])
def test_post_processing(self):
result = self.call.do_post_processing(INFO_HASH)
self.assertEqual(result, INFO_HASH)
class TestSetPriority(unittest.TestCase):
def test_pre_processing1(self):
call = self._get_call('off')
call.do_pre_processing()
self.assertEqual(call.get_args(), [INFO_HASH, 0])
def test_pre_processing2(self):
call = self._get_call('low')
call.do_pre_processing()
self.assertEqual(call.get_args(), [INFO_HASH, 1])
def test_pre_processing3(self):
call = self._get_call('normal')
call.do_pre_processing()
self.assertEqual(call.get_args(), [INFO_HASH, 2])
def test_pre_processing4(self):
call = self._get_call('high')
call.do_pre_processing()
self.assertEqual(call.get_args(), [INFO_HASH, 3])
def test_post_processing(self):
call = self._get_call()
result = call.do_post_processing(0)
self.assertEqual(result, True)
def _get_call(self, *args):
return t.rpc_call('set_priority', *args)
class TestGetPriority(unittest.TestCase):
def setUp(self):
self.call = t.rpc_call('get_priority')
def test_pre_processing(self):
self.call.do_pre_processing()
self.assertEqual(self.call.get_args(), [INFO_HASH])
def test_post_processing1(self):
result = self.call.do_post_processing(0)
self.assertEqual(result, 'off')
def test_post_processing2(self):
result = self.call.do_post_processing(1)
self.assertEqual(result, 'low')
def test_post_processing3(self):
result = self.call.do_post_processing(2)
self.assertEqual(result, 'normal')
def test_post_processing4(self):
result = self.call.do_post_processing(3)
self.assertEqual(result, 'high')
class TestIsAcceptingSeeders(unittest.TestCase):
def setUp(self):
self.call = t.rpc_call('is_accepting_seeders')
def test_pre_processing(self):
self.call.do_pre_processing()
self.assertEqual(self.call.get_args(), [INFO_HASH])
def test_post_processing1(self):
result = self.call.do_post_processing(0)
self.assertEqual(result, False)
def test_post_processing2(self):
result = self.call.do_post_processing(1)
self.assertEqual(result, True)
| 29.354839
| 59
| 0.678388
|
3c349585851d33f938671be036da7ece93d573cf
| 146
|
py
|
Python
|
stimuli/Python/one_file_per_item/jap/29_# math_for 11.py
|
ALFA-group/neural_program_comprehension
|
0253911f376cf282af5a5627e38e0a591ad38860
|
[
"MIT"
] | 6
|
2020-04-24T08:16:51.000Z
|
2021-11-01T09:50:46.000Z
|
stimuli/Python/one_file_per_item/jap/29_# math_for 11.py
|
ALFA-group/neural_program_comprehension
|
0253911f376cf282af5a5627e38e0a591ad38860
|
[
"MIT"
] | null | null | null |
stimuli/Python/one_file_per_item/jap/29_# math_for 11.py
|
ALFA-group/neural_program_comprehension
|
0253911f376cf282af5a5627e38e0a591ad38860
|
[
"MIT"
] | 4
|
2021-02-17T20:21:31.000Z
|
2022-02-14T12:43:23.000Z
|
risto1 = [4, 4, 6]
risto2 = [1, 2, 4]
risto3 = [3, 2, 1]
for ai in range(len(risto1)):
risto3[ai] += risto1[ai] + risto2[ai]
print(risto3)
| 14.6
| 41
| 0.568493
|
48f7c20c5eb5b0b753fa87fbabe1148407d33223
| 2,911
|
py
|
Python
|
codes/test_rule.py
|
soumyadip1995/Outlier-detection-Data-Exploration-Tutorial
|
57c058ffc02e613c8447551f7c8a34b369bc69c3
|
[
"MIT"
] | null | null | null |
codes/test_rule.py
|
soumyadip1995/Outlier-detection-Data-Exploration-Tutorial
|
57c058ffc02e613c8447551f7c8a34b369bc69c3
|
[
"MIT"
] | null | null | null |
codes/test_rule.py
|
soumyadip1995/Outlier-detection-Data-Exploration-Tutorial
|
57c058ffc02e613c8447551f7c8a34b369bc69c3
|
[
"MIT"
] | null | null | null |
import crcf.rule
import numpy as np
class TestAxisAlignedRule:
def test_init(self):
""" make sure the rule initializes and stores the correct values"""
rule = crcf.rule.AxisAlignedRule(5, 0)
assert rule.dimension == 5
assert rule.value == 0
def test_evaluate(self):
""" make sure simple evaluates perform as expected"""
rule = crcf.rule.AxisAlignedRule(5, 0)
x = np.array([[0, 0, 0, 0, 0, -1],
[0, 0, 0, 0, 0, 1]])
results = rule.evaluate(x)
assert results[0]
assert not results[1]
def test_generate_biased(self):
""" make sure new rules are generated according to a bias"""
# a deterministic bias check
bounding_box = np.array([[0, 0], [0, 1]])
rule = crcf.rule.AxisAlignedRule.generate(bounding_box, mode="biased")
assert rule.dimension == 1
assert rule.value >= 0
assert rule.value <= 1
# a rough attempt at testing the bias rate
bounding_box = np.array([[0, 2], [0, 1]])
num_rules = 1000
rules = [crcf.rule.AxisAlignedRule.generate(bounding_box, mode='biased') for _ in range(num_rules)]
dimension_zeros = [1 if rule.dimension == 0 else 0 for rule in rules]
assert np.sum(dimension_zeros) > num_rules * 0.5
assert np.sum(dimension_zeros) < num_rules * 0.8
def test_generate_uniform(self):
""" make sure we can generate a rule at uniform"""
# a deterministic check
bounding_box = np.array([[0, 0], [0, 1]])
rule = crcf.rule.AxisAlignedRule.generate(bounding_box, mode="uniform")
assert rule.dimension == 0 or rule.dimension == 1
assert rule.value >= 0
assert rule.value <= 1
class TestNonAxisAlignedRule:
def test_init(self):
""" make sure the rule initializes and stores the correct values"""
rule = crcf.rule.NonAxisAlignedRule(np.array([1, 2, 3]), np.array([1, 2, 4]))
assert np.all(rule.point == np.array([1, 2, 4]))
assert np.all(rule.normal == np.array([1, 2, 3]))
assert rule.normal.dot(rule.point) == rule.offset
def test_evaluate(self):
""" make sure we can evaluate points"""
normal = np.array([1, 2, 3])
point = np.array([1, 2, 3])
offset = normal.dot(point)
rule = crcf.rule.NonAxisAlignedRule(normal, point)
points = np.array([[1, 2, 3],
[0, 0, 0],
[5, 4, 6]])
assert np.all(rule.evaluate(points) == (np.inner(normal, points) < offset))
def test_generate_biased(self):
""" make sure new rules are generated according to a bias"""
# TODO: write
pass
def test_generate_uniform(self):
""" make sure we can generate a rule at uniform"""
# a deterministic check
# TODO: write
pass
| 37.805195
| 107
| 0.587771
|
d24688500c71aae0d4139a4ebadb821cd82bf287
| 8,615
|
py
|
Python
|
common.py
|
dstreev/hive_llap_calculator
|
5630d55a8bda182996388dc976ee97e2b6cb58b2
|
[
"Apache-2.0"
] | 15
|
2019-06-13T00:02:15.000Z
|
2021-02-08T03:13:23.000Z
|
common.py
|
dstreev/hive_llap_calculator
|
5630d55a8bda182996388dc976ee97e2b6cb58b2
|
[
"Apache-2.0"
] | 21
|
2019-08-14T18:46:40.000Z
|
2021-02-18T20:56:46.000Z
|
common.py
|
dstreev/hive_llap_calculator
|
5630d55a8bda182996388dc976ee97e2b6cb58b2
|
[
"Apache-2.0"
] | 4
|
2019-06-21T15:26:36.000Z
|
2020-07-10T13:22:14.000Z
|
def left(field, length):
diff = length - len(str(field))
return str(field) + " " * diff
def center(field, length):
if isinstance(field, list):
diff = length - len(str(field[0]))
return " " * (diff / 2) + str(field[0]) + " " * (length - len(str(field[0])) - (diff / 2))
else:
diff = length - len(str(field))
return " " * (diff / 2) + str(field) + " " * (length - len(str(field)) - (diff / 2))
def right(field, length):
diff = length - len(str(field))
return " " * diff + str(field)
def pprinttable(rows, fields):
output = buildtable(rows, fields)
for line in output:
print line
return output
def buildtable(rows, fields):
str_list = []
if len(rows) > 0:
# headers = HEADER._fields
# headers = HEADER
lens = []
for field in fields:
lens.append(len(field[1]))
for row in rows:
inc = 0
for field in fields:
if isinstance(row[field[0]], (int, float, long)):
if lens[inc] < 4:
lens[inc] = 4
if lens[inc] < len(str(row[field[0]])):
lens[inc] = len(str(row[field[0]]))
# if lens[inc] < 16:
# lens[inc] = 16
elif isinstance(row[field[0]], (list, tuple)):
size = 2
for i in range(len(row[field[0]])):
size += len(row[field[0]][i]) + 3
if size > lens[inc]:
lens[inc] = size
elif isinstance(row[field[0]], (dict)):
size = 2
for i in range(len(row[field[0]])):
size += len(row[field[0]]) + 3
if size > lens[inc]:
lens[inc] = size
else:
if row[field[0]] is not None and (len(row[field[0]]) > lens[inc]):
lens[inc] = len(row[field[0]])
inc += 1
headerRowSeparator = ""
headerRow = ""
for loc in range(len(fields)):
headerRowSeparator = headerRowSeparator + "|" + "=" * (lens[loc]+1)
headerRow = headerRow + "| " + center([fields[loc][1]], lens[loc])
headerRowSeparator = headerRowSeparator + "|"
headerRow = headerRow + "|"
str_list.append(headerRowSeparator)
# print headerRowSeparator
str_list.append(headerRow)
# print headerRow
str_list.append(headerRowSeparator)
# print headerRowSeparator
for row in rows:
inc = 0
recordRow = ""
offset = 0
for field in fields:
if isinstance(row[field[0]], int) or isinstance(row[field[0]], float) or isinstance(row[field[0]], long):
recordRow = recordRow + "| " + right(row[field[0]], lens[inc])
# elif isinstance(row[field[0]], bool):
# if row[field[0]]:
# recordRow = recordRow + "| " + right('X', lens[inc])
# else:
# recordRow = recordRow + "| " + right('', lens[inc])
elif isinstance(row[field[0]], (dict)):
# recordRow = recordRow + "| "
offset = len(recordRow)
it = 0
for item in row[field[0]]:
dictItem = str(row[field[0]][item])
if it == 0:
recordRow = recordRow + '|' + left(dictItem, lens[inc] + 1) + '|\n|'
elif it == len(row[field[0]]) - 1:
recordRow = recordRow + ' '.rjust(offset-1) + '|' + left(dictItem, lens[inc] + 1)
else:
recordRow = recordRow + ' '.rjust(offset-1) + '|' + left(dictItem, lens[inc] + 1) + '|\n|'
it += 1
else:
recordRow = recordRow + "| " + left(row[field[0]], lens[inc])
inc += 1
recordRow = recordRow + "|"
str_list.append(recordRow)
# print recordRow
str_list.append(headerRowSeparator)
# print headerRowSeparator
return str_list
def pprinttable2(rows, fields):
output = buildtable2(rows, fields)
for line in output:
print line
def buildtable2(rows, fields):
str_list = []
if len(rows) > 0:
# headers = HEADER._fields
# headers = HEADER
lens = []
for field in fields:
lens.append(len(field))
for row in rows:
inc = 0
for field in fields:
try:
value = row[field]
if isinstance(row[field], (int, float, long)):
if lens[inc] < 4:
lens[inc] = 4
if lens[inc] < len(str(row[field])):
lens[inc] = len(str(row[field]))
# if lens[inc] < 16:
# lens[inc] = 16
elif isinstance(row[field], (list, tuple)):
size = 2
for i in range(len(row[field])):
size += len(row[field][i]) + 3
if size > lens[inc]:
lens[inc] = size
elif isinstance(row[field], (dict)):
size = 2
for i in range(len(row[field])):
size += len(row[field]) + 3
if size > lens[inc]:
lens[inc] = size
else:
if row[field] is not None and (len(row[field]) > lens[inc]):
lens[inc] = len(row[field])
except:
pass
inc += 1
headerRowSeparator = ""
headerRow = ""
loc = 0
for field in fields:
# for loc in range(len(fields)):
headerRowSeparator = headerRowSeparator + "|" + "=" * (lens[loc]+1)
headerRow = headerRow + "| " + center(field, lens[loc])
loc += 1
headerRowSeparator = headerRowSeparator + "|"
headerRow = headerRow + "|"
str_list.append(headerRowSeparator)
# print headerRowSeparator
str_list.append(headerRow)
# print headerRow
str_list.append(headerRowSeparator)
# print headerRowSeparator
for row in rows:
inc = 0
recordRow = ""
offset = 0
for field in fields:
try:
value = row[field]
if isinstance(row[field], int) or isinstance(row[field], float) or isinstance(row[field], long):
recordRow = recordRow + "| " + right(row[field], lens[inc])
# elif isinstance(row[field[0]], bool):
# if row[field[0]]:
# recordRow = recordRow + "| " + right('X', lens[inc])
# else:
# recordRow = recordRow + "| " + right('', lens[inc])
elif isinstance(row[field], (dict)):
# recordRow = recordRow + "| "
offset = len(recordRow)
it = 0
for item in row[field]:
dictItem = str(item) + ':' + str(row[field][item])
if it == 0:
recordRow = recordRow + '|' + left(dictItem, lens[inc] + 1) + '|\n|'
elif it == len(row[field]) - 1:
recordRow = recordRow + ' '.rjust(offset-1) + '|' + left(dictItem, lens[inc] + 1)
else:
recordRow = recordRow + ' '.rjust(offset-1) + '|' + left(dictItem, lens[inc] + 1) + '|\n|'
it += 1
else:
recordRow = recordRow + "| " + left(row[field], lens[inc])
except:
recordRow = recordRow + "| " + left(' ', lens[inc])
inc += 1
recordRow = recordRow + "|"
str_list.append(recordRow)
# print recordRow
str_list.append(headerRowSeparator)
# print headerRowSeparator
return str_list
| 37.951542
| 122
| 0.427974
|
ffa8940736078b56c3817877ca10bc8c08d6324e
| 7,512
|
py
|
Python
|
Fuzzy_clustering/version2/feature_selection_manager/feature_selection_permutation.py
|
joesider9/forecasting_library
|
db07ff8f0f2693983058d49004f2fc6f8849d197
|
[
"Apache-2.0"
] | null | null | null |
Fuzzy_clustering/version2/feature_selection_manager/feature_selection_permutation.py
|
joesider9/forecasting_library
|
db07ff8f0f2693983058d49004f2fc6f8849d197
|
[
"Apache-2.0"
] | null | null | null |
Fuzzy_clustering/version2/feature_selection_manager/feature_selection_permutation.py
|
joesider9/forecasting_library
|
db07ff8f0f2693983058d49004f2fc6f8849d197
|
[
"Apache-2.0"
] | null | null | null |
import copy
import joblib
import os
import numpy as np
from sklearn.decomposition import PCA
from Fuzzy_clustering.version2.sklearn_models.sklearn_models_optuna import sklearn_model
class FS(object):
def __init__(self, static_data, model_path, njobs, inner_jobs, path_group=None):
self.static_data = static_data
self.path_group = path_group
self.njobs = njobs
self.inner_jobs = inner_jobs
self.log_dir = os.path.join(model_path, 'FS/PERM')
if not os.path.exists(self.log_dir):
os.makedirs(self.log_dir)
def reduce_dim(self, cvs):
ncpus = joblib.load(os.path.join(self.path_group, 'total_cpus.pickle'))
gpu_status = joblib.load(os.path.join(self.path_group, 'gpu_status.pickle'))
njobs = int(ncpus - gpu_status)
cpu_status = njobs
joblib.dump(cpu_status, os.path.join(self.path_group, 'cpu_status.pickle'))
for i in range(3):
cvs[i][0] = cvs[i][0][:, self.features]
cvs[i][2] = cvs[i][2][:, self.features]
cvs[i][4] = cvs[i][4][:, self.features]
X_train = cvs[0][0]
y_train = cvs[0][1].reshape(-1, 1)
X_val = cvs[0][2]
y_val = cvs[0][3].reshape(-1, 1)
X_test = cvs[0][4]
y_test = cvs[0][5].reshape(-1, 1)
X_train = np.vstack((X_train, X_val, X_test))
y_train = np.vstack((y_train, y_val, y_test))
reduction = np.linspace(48, self.N_tot, self.N_tot - 48) / np.logspace(0, 0.3, self.N_tot - 48)
n_components = reduction[int(X_train.shape[1] - 48 - 1)]
pca = PCA(n_components=n_components)
pca.fit(X_train)
return pca
def fit(self, cvs):
# logger = logging.getLogger('log_fs_permutation')
# logger.setLevel(logging.INFO)
# handler = logging.FileHandler(os.path.join(self.log_dir, 'log_fs_perm.log'), 'w')
# handler.setLevel(logging.INFO)
#
# # create a logging format
# formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# handler.setFormatter(formatter)
#
# # add the handlers to the logger
# logger.addHandler(handler)
print()
print('Training the model (Fitting to the training data) ')
# logger.info('Training the feature extraction ')
method = 'rf'
ncpus = joblib.load(os.path.join(self.path_group, 'total_cpus.pickle'))
gpu_status = joblib.load(os.path.join(self.path_group, 'gpu_status.pickle'))
njobs = int(ncpus - gpu_status)
cpu_status = njobs
joblib.dump(cpu_status, os.path.join(self.path_group, 'cpu_status.pickle'))
regressor = sklearn_model(self.static_data, self.log_dir, 1, method, njobs, FS=True, path_group=self.path_group)
regressor.train(cvs)
self.N_tot = cvs[0][0].shape[1]
features = np.arange(cvs[0][0].shape[1])
np.random.shuffle(features)
# features=features[np.argsort(regressor.model.feature_importances_)[::-1]]
acc_test = regressor.acc_test
# cv_result = regressor.cv_results.nlargest(10, 'acc')['params'].to_list()
flag = True
cvs_temp = copy.deepcopy(cvs)
remove_features = []
keep_features = []
unchecked = np.copy(features)
while flag:
for f in unchecked:
ncpus = joblib.load(os.path.join(self.path_group, 'total_cpus.pickle'))
gpu_status = joblib.load(os.path.join(self.path_group, 'gpu_status.pickle'))
njobs = int(ncpus - gpu_status)
cpu_status = njobs
joblib.dump(cpu_status, os.path.join(self.path_group, 'cpu_status.pickle'))
features_temp = np.hstack(
(np.array(keep_features), np.delete(unchecked, np.where(unchecked == f)))).astype('int')
reg_temp = sklearn_model(self.static_data, os.path.join(self.log_dir, 'temp'), 1, method, njobs,
FS=True, path_group=self.path_group)
for i in range(3):
cvs_temp[i][0] = copy.deepcopy(cvs[i][0][:, features_temp])
cvs_temp[i][2] = copy.deepcopy(cvs[i][2][:, features_temp])
cvs_temp[i][4] = copy.deepcopy(cvs[i][4][:, features_temp])
reg_temp.train(cvs_temp)
# cv_result = reg_temp.cv_results.nlargest(5, 'acc')['params'].to_list()
if (reg_temp.acc_test - acc_test) < -0.005:
# logger.info('Remove feature %s accuracy: %s', str(f), str(reg_temp.acc_test))
print('Remove feature ', str(f), ' accuracy: ', str(reg_temp.acc_test))
remove_features.append(f)
unchecked = np.delete(unchecked, np.where(unchecked == f))
acc_test = reg_temp.acc_test
break
else:
print('ADD feature ', str(f), ' accuracy:', str(reg_temp.acc_test))
# logger.info('ADD feature %s accuracy: %s', str(f), str(reg_temp.acc_test))
keep_features.append(f)
unchecked = np.delete(unchecked, np.where(unchecked == f))
if unchecked.shape[0] == 0:
flag = False
else:
np.random.shuffle(unchecked)
features = np.array(keep_features)
self.features = features
if self.features.shape[0] > 48:
pca = self.reduce_dim(cvs)
else:
pca = None
# logger.info('Number of variables %s', str(self.features.shape[0]))
# logger.info('Finish the feature extraction ')
return features, pca
#
# def test_fs_permute(cvs, X_test1, y_test1, cluster_dir):
#
# logger = logging.getLogger('log_rbf_cnn_test.log')
# logger.setLevel(logging.INFO)
# handler = logging.FileHandler(os.path.join(cluster_dir, 'log_rbf_cnn_test.log'), 'a')
# handler.setLevel(logging.INFO)
#
# # create a logging format
# formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# handler.setFormatter(formatter)
#
# # add the handlers to the logger
# logger.addHandler(handler)
#
# rated = None
#
# static_data = write_database()
#
# logger.info('Permutation Evaluation')
# logger.info('/n')
# method = 'svm'
# model_sklearn = sklearn_model(cluster_dir, rated, method, static_data['sklearn']['njobs'])
# model_sklearn.train(cvs)
# pred = model_sklearn.predict(X_test1)
#
# metrics_svm = model_sklearn.compute_metrics(pred, y_test1, rated)
# logger.info('before feature selection metrics')
# logger.info('sse, %s rms %s, mae %s, mse %s', *metrics_svm)
#
# fs = FS(cluster_dir, static_data['sklearn']['njobs'])
# features = fs.fit(cvs)
# logger.info('Number of variables %s', str(features.shape[0]))
#
# for i in range(3):
# cvs[i][0] = cvs[i][0][:,features]
# cvs[i][2] = cvs[i][2][:,features]
# cvs[i][4] = cvs[i][4][:,features]
#
# model_sklearn = sklearn_model(cluster_dir, rated, method, static_data['sklearn']['njobs'])
# model_sklearn.train(cvs)
# pred = model_sklearn.predict(X_test1[:,features])
#
# metrics_svm = model_sklearn.compute_metrics(pred, y_test1, rated)
# logger.info('After feature selection metrics')
# logger.info('sse, %s rms %s, mae %s, mse %s', *metrics_svm)
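# --- Hedged illustration (not part of the original module) -----------------
# Minimal, self-contained sketch of the greedy permutation-style feature
# selection used in FS.fit above: features are visited in random order and a
# reduced set is accepted whenever the score does not get worse by more than
# a small tolerance.  `score_fn` is a hypothetical callable supplied by the
# caller (e.g. a cross-validated model score); the exact accept/reject rule
# in FS.fit depends on whether its metric is a score or a loss.
def greedy_feature_selection(score_fn, n_features, tol=0.005, seed=0):
    import numpy as np
    rng = np.random.default_rng(seed)
    order = rng.permutation(n_features)
    keep = list(order)
    best = score_fn(keep)
    for f in order:
        trial = [i for i in keep if i != f]
        if not trial:
            break
        score = score_fn(trial)
        if score >= best - tol:
            # dropping f does not hurt: keep the smaller feature set
            keep, best = trial, score
    return sorted(keep), best
if __name__ == "__main__":
    # Toy usage: the score rewards even-indexed features and mildly penalises
    # set size, so the sketch should prune the odd-indexed features.
    toy_score = lambda idx: sum(1 for i in idx if i % 2 == 0) - 0.01 * len(idx)
    print(greedy_feature_selection(toy_score, n_features=10))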
| 39.329843
| 120
| 0.594515
|
a57b0b6819eeed9fe1ac961f65d10a4b3d65bcc1
| 1,146
|
py
|
Python
|
tests/test_parser.py
|
kmlee78/django-ninja
|
ae543f2d43ff4fbc0aa22713942be77d25e1bca3
|
[
"MIT"
] | 2,809
|
2020-06-21T08:48:40.000Z
|
2022-03-30T16:42:11.000Z
|
tests/test_parser.py
|
kmlee78/django-ninja
|
ae543f2d43ff4fbc0aa22713942be77d25e1bca3
|
[
"MIT"
] | 311
|
2020-06-22T07:59:27.000Z
|
2022-03-31T18:01:23.000Z
|
tests/test_parser.py
|
kmlee78/django-ninja
|
ae543f2d43ff4fbc0aa22713942be77d25e1bca3
|
[
"MIT"
] | 178
|
2020-07-08T00:40:43.000Z
|
2022-03-29T02:05:20.000Z
|
from typing import List
from django.http import HttpRequest, QueryDict
from ninja import NinjaAPI
from ninja.parser import Parser
from ninja.testing import TestClient
class MyParser(Parser):
"Default json parser"
def parse_body(self, request: HttpRequest):
"just splitting body to lines"
        # request.body is bytes; decode it before splitting into lines
        return request.body.decode().splitlines()
def parse_querydict(
self, data: QueryDict, list_fields: List[str], request: HttpRequest
):
"Turning empty Query params to None instead of empty string"
result = super().parse_querydict(data, list_fields, request)
for k, v in list(result.items()):
if v == "":
del result[k]
return result
api = NinjaAPI(parser=MyParser())
@api.post("/test")
def operation(request, body: List[str], emptyparam: str = None):
return {"emptyparam": emptyparam, "body": body}
def test_parser():
client = TestClient(api)
response = client.post("/test?emptyparam", body="test\nbar")
assert response.status_code == 200, response.content
assert response.json() == {"emptyparam": None, "body": ["test", "bar"]}
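# --- Hedged illustration (not part of the original tests) ------------------
# Companion check with a non-empty query value, to contrast with the empty
# parameter case above; it assumes the same api/TestClient behaviour shown
# in test_parser.
def test_parser_nonempty_param():
    client = TestClient(api)
    response = client.post("/test?emptyparam=x", body="a\nb")
    assert response.status_code == 200, response.content
    assert response.json() == {"emptyparam": "x", "body": ["a", "b"]}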
| 27.95122
| 75
| 0.667539
|
2fcd4d558768664c6931c2078ea7f1d57381baf0
| 863
|
py
|
Python
|
increase_inodes.py
|
arcapix/gpfsapi-examples
|
15bff7fda7b0a576209253dee48eb44e4c0d565f
|
[
"MIT"
] | 10
|
2016-05-17T12:58:35.000Z
|
2022-01-10T05:23:45.000Z
|
increase_inodes.py
|
arcapix/gpfsapi-examples
|
15bff7fda7b0a576209253dee48eb44e4c0d565f
|
[
"MIT"
] | null | null | null |
increase_inodes.py
|
arcapix/gpfsapi-examples
|
15bff7fda7b0a576209253dee48eb44e4c0d565f
|
[
"MIT"
] | 1
|
2016-09-12T09:07:00.000Z
|
2016-09-12T09:07:00.000Z
|
from arcapix.fs.gpfs import Cluster
threshold_pct = 80 # watermark for inode increasing
incr_pct = 20 # increase by pct
max_inode_incr = 50000 # do not increase by more than max_inode_incr
for fset in Cluster().filesystems['mmfs1'].filesets.independent().values():
# Check if the fileset has breached its inode watermark
if fset.allocInodes >= (fset.maxInodes * threshold_pct / 100.):
# Increase the inodes of the fileset
new_inodes_num = int(fset.maxInodes * incr_pct / 100.)
# Ensure that the additional increase does not exceed the maximum inode increase
if new_inodes_num > max_inode_incr:
new_inodes_num = max_inode_incr
# Add the new allocation on top of the current maximum allocation
new_inodes_num = new_inodes_num + fset.maxInodes
# Make the change
fset.change(maxInodes=new_inodes_num)
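# --- Hedged illustration (not part of the original script) -----------------
# Pure-Python restatement of the sizing rule above, handy for a dry run with
# no GPFS cluster attached: grow by incr_pct of the current maximum, cap the
# growth at max_inode_incr, and only act once allocation crosses the
# threshold_pct watermark.
def proposed_max_inodes(alloc, max_inodes, threshold_pct=80, incr_pct=20, max_inode_incr=50000):
    if alloc < max_inodes * threshold_pct / 100.:
        return max_inodes  # below the watermark: leave the fileset unchanged
    increase = min(int(max_inodes * incr_pct / 100.), max_inode_incr)
    return max_inodes + increase
# Example: 90,000 of 100,000 inodes allocated -> new maximum of 120,000.
assert proposed_max_inodes(90000, 100000) == 120000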
| 35.958333
| 84
| 0.734647
|
d8ca0971017be590616446e8539418be6606468b
| 34,622
|
py
|
Python
|
middleware/venv/lib/python3.6/site-packages/sqlalchemy/pool/base.py
|
gregorypothier/pothier-website
|
7aac5bffa9e0dda29d031783a3c16e3f3e26a945
|
[
"CNRI-Python"
] | 1
|
2020-01-10T04:22:33.000Z
|
2020-01-10T04:22:33.000Z
|
middleware/venv/lib/python3.6/site-packages/sqlalchemy/pool/base.py
|
gregorypothier/pothier-website
|
7aac5bffa9e0dda29d031783a3c16e3f3e26a945
|
[
"CNRI-Python"
] | null | null | null |
middleware/venv/lib/python3.6/site-packages/sqlalchemy/pool/base.py
|
gregorypothier/pothier-website
|
7aac5bffa9e0dda29d031783a3c16e3f3e26a945
|
[
"CNRI-Python"
] | null | null | null |
# sqlalchemy/pool.py
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Base constructs for connection pools.
"""
from collections import deque
import time
import weakref
from .. import event
from .. import exc
from .. import interfaces
from .. import log
from .. import util
from ..util import threading
reset_rollback = util.symbol("reset_rollback")
reset_commit = util.symbol("reset_commit")
reset_none = util.symbol("reset_none")
class _ConnDialect(object):
"""partial implementation of :class:`.Dialect`
which provides DBAPI connection methods.
When a :class:`.Pool` is combined with an :class:`.Engine`,
the :class:`.Engine` replaces this with its own
:class:`.Dialect`.
"""
def do_rollback(self, dbapi_connection):
dbapi_connection.rollback()
def do_commit(self, dbapi_connection):
dbapi_connection.commit()
def do_close(self, dbapi_connection):
dbapi_connection.close()
def do_ping(self, dbapi_connection):
raise NotImplementedError(
"The ping feature requires that a dialect is "
"passed to the connection pool."
)
class Pool(log.Identified):
"""Abstract base class for connection pools."""
_dialect = _ConnDialect()
@util.deprecated_params(
use_threadlocal=(
"1.3",
"The :paramref:`.Pool.use_threadlocal` parameter is "
"deprecated and will be removed in a future release.",
),
listeners=(
"0.7",
":class:`.PoolListener` is deprecated in favor of the "
":class:`.PoolEvents` listener interface. The "
":paramref:`.Pool.listeners` parameter will be removed in a "
"future release.",
),
)
def __init__(
self,
creator,
recycle=-1,
echo=None,
use_threadlocal=False,
logging_name=None,
reset_on_return=True,
listeners=None,
events=None,
dialect=None,
pre_ping=False,
_dispatch=None,
):
"""
Construct a Pool.
:param creator: a callable function that returns a DB-API
connection object. The function will be called with
parameters.
:param recycle: If set to a value other than -1, number of
seconds between connection recycling, which means upon
checkout, if this timeout is surpassed the connection will be
closed and replaced with a newly opened connection. Defaults to -1.
:param logging_name: String identifier which will be used within
the "name" field of logging records generated within the
"sqlalchemy.pool" logger. Defaults to a hexstring of the object's
id.
:param echo: if True, the connection pool will log
informational output such as when connections are invalidated
as well as when connections are recycled to the default log handler,
          which defaults to ``sys.stdout`` for output. If set to the string
``"debug"``, the logging will include pool checkouts and checkins.
The :paramref:`.Pool.echo` parameter can also be set from the
:func:`.create_engine` call by using the
:paramref:`.create_engine.echo_pool` parameter.
.. seealso::
:ref:`dbengine_logging` - further detail on how to configure
logging.
:param use_threadlocal: If set to True, repeated calls to
:meth:`connect` within the same application thread will be
guaranteed to return the same connection object that is already
checked out. This is a legacy use case and the flag has no
effect when using the pool with a :class:`.Engine` object.
:param reset_on_return: Determine steps to take on
connections as they are returned to the pool.
reset_on_return can have any of these values:
* ``"rollback"`` - call rollback() on the connection,
to release locks and transaction resources.
This is the default value. The vast majority
of use cases should leave this value set.
* ``True`` - same as 'rollback', this is here for
backwards compatibility.
* ``"commit"`` - call commit() on the connection,
to release locks and transaction resources.
A commit here may be desirable for databases that
cache query plans if a commit is emitted,
such as Microsoft SQL Server. However, this
value is more dangerous than 'rollback' because
any data changes present on the transaction
are committed unconditionally.
* ``None`` - don't do anything on the connection.
This setting should generally only be made on a database
that has no transaction support at all,
            namely MySQL MyISAM; when used on this backend, performance
can be improved as the "rollback" call is still expensive on
MySQL. It is **strongly recommended** that this setting not be
used for transaction-supporting databases in conjunction with
a persistent pool such as :class:`.QueuePool`, as it opens
the possibility for connections still in a transaction to be
idle in the pool. The setting may be appropriate in the
case of :class:`.NullPool` or special circumstances where
the connection pool in use is not being used to maintain connection
lifecycle.
* ``False`` - same as None, this is here for
backwards compatibility.
:param events: a list of 2-tuples, each of the form
``(callable, target)`` which will be passed to :func:`.event.listen`
upon construction. Provided here so that event listeners
can be assigned via :func:`.create_engine` before dialect-level
listeners are applied.
:param listeners: A list of :class:`.PoolListener`-like objects or
dictionaries of callables that receive events when DB-API
connections are created, checked out and checked in to the
pool.
:param dialect: a :class:`.Dialect` that will handle the job
of calling rollback(), close(), or commit() on DBAPI connections.
If omitted, a built-in "stub" dialect is used. Applications that
make use of :func:`~.create_engine` should not use this parameter
as it is handled by the engine creation strategy.
.. versionadded:: 1.1 - ``dialect`` is now a public parameter
to the :class:`.Pool`.
:param pre_ping: if True, the pool will emit a "ping" (typically
"SELECT 1", but is dialect-specific) on the connection
upon checkout, to test if the connection is alive or not. If not,
the connection is transparently re-connected and upon success, all
other pooled connections established prior to that timestamp are
invalidated. Requires that a dialect is passed as well to
interpret the disconnection error.
.. versionadded:: 1.2
"""
if logging_name:
self.logging_name = self._orig_logging_name = logging_name
else:
self._orig_logging_name = None
log.instance_logger(self, echoflag=echo)
self._threadconns = threading.local()
self._creator = creator
self._recycle = recycle
self._invalidate_time = 0
self._use_threadlocal = use_threadlocal
self._pre_ping = pre_ping
self._reset_on_return = util.symbol.parse_user_argument(
reset_on_return,
{
reset_rollback: ["rollback", True],
reset_none: ["none", None, False],
reset_commit: ["commit"],
},
"reset_on_return",
resolve_symbol_names=False,
)
self.echo = echo
if _dispatch:
self.dispatch._update(_dispatch, only_propagate=False)
if dialect:
self._dialect = dialect
if events:
for fn, target in events:
event.listen(self, target, fn)
if listeners:
for l in listeners:
self.add_listener(l)
@property
def _creator(self):
return self.__dict__["_creator"]
@_creator.setter
def _creator(self, creator):
self.__dict__["_creator"] = creator
self._invoke_creator = self._should_wrap_creator(creator)
def _should_wrap_creator(self, creator):
"""Detect if creator accepts a single argument, or is sent
as a legacy style no-arg function.
"""
try:
argspec = util.get_callable_argspec(self._creator, no_self=True)
except TypeError:
return lambda crec: creator()
defaulted = argspec[3] is not None and len(argspec[3]) or 0
positionals = len(argspec[0]) - defaulted
# look for the exact arg signature that DefaultStrategy
# sends us
if (argspec[0], argspec[3]) == (["connection_record"], (None,)):
return creator
# or just a single positional
elif positionals == 1:
return creator
# all other cases, just wrap and assume legacy "creator" callable
# thing
else:
return lambda crec: creator()
def _close_connection(self, connection):
self.logger.debug("Closing connection %r", connection)
try:
self._dialect.do_close(connection)
except Exception:
self.logger.error(
"Exception closing connection %r", connection, exc_info=True
)
@util.deprecated(
"0.7",
"The :meth:`.Pool.add_listener` method is deprecated and "
"will be removed in a future release. Please use the "
":class:`.PoolEvents` listener interface.",
)
def add_listener(self, listener):
"""Add a :class:`.PoolListener`-like object to this pool.
``listener`` may be an object that implements some or all of
PoolListener, or a dictionary of callables containing implementations
of some or all of the named methods in PoolListener.
"""
interfaces.PoolListener._adapt_listener(self, listener)
def unique_connection(self):
"""Produce a DBAPI connection that is not referenced by any
thread-local context.
This method is equivalent to :meth:`.Pool.connect` when the
:paramref:`.Pool.use_threadlocal` flag is not set to True.
When :paramref:`.Pool.use_threadlocal` is True, the
:meth:`.Pool.unique_connection` method provides a means of bypassing
the threadlocal context.
"""
return _ConnectionFairy._checkout(self)
def _create_connection(self):
"""Called by subclasses to create a new ConnectionRecord."""
return _ConnectionRecord(self)
def _invalidate(self, connection, exception=None, _checkin=True):
"""Mark all connections established within the generation
of the given connection as invalidated.
If this pool's last invalidate time is before when the given
connection was created, update the timestamp til now. Otherwise,
no action is performed.
Connections with a start time prior to this pool's invalidation
time will be recycled upon next checkout.
"""
rec = getattr(connection, "_connection_record", None)
if not rec or self._invalidate_time < rec.starttime:
self._invalidate_time = time.time()
if _checkin and getattr(connection, "is_valid", False):
connection.invalidate(exception)
def recreate(self):
"""Return a new :class:`.Pool`, of the same class as this one
and configured with identical creation arguments.
This method is used in conjunction with :meth:`dispose`
to close out an entire :class:`.Pool` and create a new one in
its place.
"""
raise NotImplementedError()
def dispose(self):
"""Dispose of this pool.
This method leaves the possibility of checked-out connections
remaining open, as it only affects connections that are
idle in the pool.
.. seealso::
:meth:`Pool.recreate`
"""
raise NotImplementedError()
def connect(self):
"""Return a DBAPI connection from the pool.
The connection is instrumented such that when its
``close()`` method is called, the connection will be returned to
the pool.
"""
if not self._use_threadlocal:
return _ConnectionFairy._checkout(self)
try:
rec = self._threadconns.current()
except AttributeError:
pass
else:
if rec is not None:
return rec._checkout_existing()
return _ConnectionFairy._checkout(self, self._threadconns)
def _return_conn(self, record):
"""Given a _ConnectionRecord, return it to the :class:`.Pool`.
This method is called when an instrumented DBAPI connection
has its ``close()`` method called.
"""
if self._use_threadlocal:
try:
del self._threadconns.current
except AttributeError:
pass
self._do_return_conn(record)
def _do_get(self):
"""Implementation for :meth:`get`, supplied by subclasses."""
raise NotImplementedError()
def _do_return_conn(self, conn):
"""Implementation for :meth:`return_conn`, supplied by subclasses."""
raise NotImplementedError()
def status(self):
raise NotImplementedError()
class _ConnectionRecord(object):
"""Internal object which maintains an individual DBAPI connection
referenced by a :class:`.Pool`.
The :class:`._ConnectionRecord` object always exists for any particular
DBAPI connection whether or not that DBAPI connection has been
"checked out". This is in contrast to the :class:`._ConnectionFairy`
which is only a public facade to the DBAPI connection while it is checked
out.
A :class:`._ConnectionRecord` may exist for a span longer than that
of a single DBAPI connection. For example, if the
:meth:`._ConnectionRecord.invalidate`
method is called, the DBAPI connection associated with this
:class:`._ConnectionRecord`
will be discarded, but the :class:`._ConnectionRecord` may be used again,
in which case a new DBAPI connection is produced when the :class:`.Pool`
next uses this record.
The :class:`._ConnectionRecord` is delivered along with connection
pool events, including :meth:`.PoolEvents.connect` and
:meth:`.PoolEvents.checkout`, however :class:`._ConnectionRecord` still
remains an internal object whose API and internals may change.
.. seealso::
:class:`._ConnectionFairy`
"""
def __init__(self, pool, connect=True):
self.__pool = pool
if connect:
self.__connect(first_connect_check=True)
self.finalize_callback = deque()
fairy_ref = None
starttime = None
connection = None
"""A reference to the actual DBAPI connection being tracked.
May be ``None`` if this :class:`._ConnectionRecord` has been marked
as invalidated; a new DBAPI connection may replace it if the owning
pool calls upon this :class:`._ConnectionRecord` to reconnect.
"""
_soft_invalidate_time = 0
@util.memoized_property
def info(self):
"""The ``.info`` dictionary associated with the DBAPI connection.
This dictionary is shared among the :attr:`._ConnectionFairy.info`
and :attr:`.Connection.info` accessors.
.. note::
The lifespan of this dictionary is linked to the
DBAPI connection itself, meaning that it is **discarded** each time
the DBAPI connection is closed and/or invalidated. The
:attr:`._ConnectionRecord.record_info` dictionary remains
persistent throughout the lifespan of the
:class:`._ConnectionRecord` container.
"""
return {}
@util.memoized_property
def record_info(self):
"""An "info' dictionary associated with the connection record
itself.
Unlike the :attr:`._ConnectionRecord.info` dictionary, which is linked
to the lifespan of the DBAPI connection, this dictionary is linked
to the lifespan of the :class:`._ConnectionRecord` container itself
and will remain persistent throughout the life of the
:class:`._ConnectionRecord`.
.. versionadded:: 1.1
"""
return {}
@classmethod
def checkout(cls, pool):
rec = pool._do_get()
try:
dbapi_connection = rec.get_connection()
except Exception as err:
with util.safe_reraise():
rec._checkin_failed(err)
echo = pool._should_log_debug()
fairy = _ConnectionFairy(dbapi_connection, rec, echo)
rec.fairy_ref = weakref.ref(
fairy,
lambda ref: _finalize_fairy
and _finalize_fairy(None, rec, pool, ref, echo),
)
_refs.add(rec)
if echo:
pool.logger.debug(
"Connection %r checked out from pool", dbapi_connection
)
return fairy
def _checkin_failed(self, err):
self.invalidate(e=err)
self.checkin(_no_fairy_ref=True)
def checkin(self, _no_fairy_ref=False):
if self.fairy_ref is None and not _no_fairy_ref:
util.warn("Double checkin attempted on %s" % self)
return
self.fairy_ref = None
connection = self.connection
pool = self.__pool
while self.finalize_callback:
finalizer = self.finalize_callback.pop()
finalizer(connection)
if pool.dispatch.checkin:
pool.dispatch.checkin(connection, self)
pool._return_conn(self)
@property
def in_use(self):
return self.fairy_ref is not None
@property
def last_connect_time(self):
return self.starttime
def close(self):
if self.connection is not None:
self.__close()
def invalidate(self, e=None, soft=False):
"""Invalidate the DBAPI connection held by this :class:`._ConnectionRecord`.
This method is called for all connection invalidations, including
when the :meth:`._ConnectionFairy.invalidate` or
:meth:`.Connection.invalidate` methods are called, as well as when any
so-called "automatic invalidation" condition occurs.
:param e: an exception object indicating a reason for the invalidation.
:param soft: if True, the connection isn't closed; instead, this
connection will be recycled on next checkout.
.. versionadded:: 1.0.3
.. seealso::
:ref:`pool_connection_invalidation`
"""
# already invalidated
if self.connection is None:
return
if soft:
self.__pool.dispatch.soft_invalidate(self.connection, self, e)
else:
self.__pool.dispatch.invalidate(self.connection, self, e)
if e is not None:
self.__pool.logger.info(
"%sInvalidate connection %r (reason: %s:%s)",
"Soft " if soft else "",
self.connection,
e.__class__.__name__,
e,
)
else:
self.__pool.logger.info(
"%sInvalidate connection %r",
"Soft " if soft else "",
self.connection,
)
if soft:
self._soft_invalidate_time = time.time()
else:
self.__close()
self.connection = None
def get_connection(self):
recycle = False
if self.connection is None:
self.info.clear()
self.__connect()
elif (
self.__pool._recycle > -1
and time.time() - self.starttime > self.__pool._recycle
):
self.__pool.logger.info(
"Connection %r exceeded timeout; recycling", self.connection
)
recycle = True
elif self.__pool._invalidate_time > self.starttime:
self.__pool.logger.info(
"Connection %r invalidated due to pool invalidation; "
+ "recycling",
self.connection,
)
recycle = True
elif self._soft_invalidate_time > self.starttime:
self.__pool.logger.info(
"Connection %r invalidated due to local soft invalidation; "
+ "recycling",
self.connection,
)
recycle = True
if recycle:
self.__close()
self.info.clear()
self.__connect()
return self.connection
def __close(self):
self.finalize_callback.clear()
if self.__pool.dispatch.close:
self.__pool.dispatch.close(self.connection, self)
self.__pool._close_connection(self.connection)
self.connection = None
def __connect(self, first_connect_check=False):
pool = self.__pool
# ensure any existing connection is removed, so that if
# creator fails, this attribute stays None
self.connection = None
try:
self.starttime = time.time()
connection = pool._invoke_creator(self)
pool.logger.debug("Created new connection %r", connection)
self.connection = connection
except Exception as e:
pool.logger.debug("Error on connect(): %s", e)
raise
else:
if first_connect_check:
pool.dispatch.first_connect.for_modify(
pool.dispatch
).exec_once_unless_exception(self.connection, self)
if pool.dispatch.connect:
pool.dispatch.connect(self.connection, self)
def _finalize_fairy(
connection, connection_record, pool, ref, echo, fairy=None
):
"""Cleanup for a :class:`._ConnectionFairy` whether or not it's already
been garbage collected.
"""
_refs.discard(connection_record)
if ref is not None:
if connection_record.fairy_ref is not ref:
return
assert connection is None
connection = connection_record.connection
if connection is not None:
if connection_record and echo:
pool.logger.debug(
"Connection %r being returned to pool", connection
)
try:
fairy = fairy or _ConnectionFairy(
connection, connection_record, echo
)
assert fairy.connection is connection
fairy._reset(pool)
# Immediately close detached instances
if not connection_record:
if pool.dispatch.close_detached:
pool.dispatch.close_detached(connection)
pool._close_connection(connection)
except BaseException as e:
pool.logger.error(
"Exception during reset or similar", exc_info=True
)
if connection_record:
connection_record.invalidate(e=e)
if not isinstance(e, Exception):
raise
if connection_record and connection_record.fairy_ref is not None:
connection_record.checkin()
_refs = set()
class _ConnectionFairy(object):
"""Proxies a DBAPI connection and provides return-on-dereference
support.
This is an internal object used by the :class:`.Pool` implementation
to provide context management to a DBAPI connection delivered by
that :class:`.Pool`.
The name "fairy" is inspired by the fact that the
:class:`._ConnectionFairy` object's lifespan is transitory, as it lasts
only for the length of a specific DBAPI connection being checked out from
the pool, and additionally that as a transparent proxy, it is mostly
invisible.
.. seealso::
:class:`._ConnectionRecord`
"""
def __init__(self, dbapi_connection, connection_record, echo):
self.connection = dbapi_connection
self._connection_record = connection_record
self._echo = echo
connection = None
"""A reference to the actual DBAPI connection being tracked."""
_connection_record = None
"""A reference to the :class:`._ConnectionRecord` object associated
with the DBAPI connection.
This is currently an internal accessor which is subject to change.
"""
_reset_agent = None
"""Refer to an object with a ``.commit()`` and ``.rollback()`` method;
if non-None, the "reset-on-return" feature will call upon this object
rather than directly against the dialect-level do_rollback() and
do_commit() methods.
In practice, a :class:`.Connection` assigns a :class:`.Transaction` object
to this variable when one is in scope so that the :class:`.Transaction`
takes the job of committing or rolling back on return if
:meth:`.Connection.close` is called while the :class:`.Transaction`
still exists.
This is essentially an "event handler" of sorts but is simplified as an
instance variable both for performance/simplicity as well as that there
can only be one "reset agent" at a time.
"""
@classmethod
def _checkout(cls, pool, threadconns=None, fairy=None):
if not fairy:
fairy = _ConnectionRecord.checkout(pool)
fairy._pool = pool
fairy._counter = 0
if threadconns is not None:
threadconns.current = weakref.ref(fairy)
if fairy.connection is None:
raise exc.InvalidRequestError("This connection is closed")
fairy._counter += 1
if (
not pool.dispatch.checkout and not pool._pre_ping
) or fairy._counter != 1:
return fairy
# Pool listeners can trigger a reconnection on checkout, as well
# as the pre-pinger.
# there are three attempts made here, but note that if the database
# is not accessible from a connection standpoint, those won't proceed
# here.
attempts = 2
while attempts > 0:
try:
if pool._pre_ping:
if fairy._echo:
pool.logger.debug(
"Pool pre-ping on connection %s", fairy.connection
)
result = pool._dialect.do_ping(fairy.connection)
if not result:
if fairy._echo:
pool.logger.debug(
"Pool pre-ping on connection %s failed, "
"will invalidate pool",
fairy.connection,
)
raise exc.InvalidatePoolError()
pool.dispatch.checkout(
fairy.connection, fairy._connection_record, fairy
)
return fairy
except exc.DisconnectionError as e:
if e.invalidate_pool:
pool.logger.info(
"Disconnection detected on checkout, "
"invalidating all pooled connections prior to "
"current timestamp (reason: %r)",
e,
)
fairy._connection_record.invalidate(e)
pool._invalidate(fairy, e, _checkin=False)
else:
pool.logger.info(
"Disconnection detected on checkout, "
"invalidating individual connection %s (reason: %r)",
fairy.connection,
e,
)
fairy._connection_record.invalidate(e)
try:
fairy.connection = (
fairy._connection_record.get_connection()
)
except Exception as err:
with util.safe_reraise():
fairy._connection_record._checkin_failed(err)
attempts -= 1
pool.logger.info("Reconnection attempts exhausted on checkout")
fairy.invalidate()
raise exc.InvalidRequestError("This connection is closed")
def _checkout_existing(self):
return _ConnectionFairy._checkout(self._pool, fairy=self)
def _checkin(self):
_finalize_fairy(
self.connection,
self._connection_record,
self._pool,
None,
self._echo,
fairy=self,
)
self.connection = None
self._connection_record = None
_close = _checkin
def _reset(self, pool):
if pool.dispatch.reset:
pool.dispatch.reset(self, self._connection_record)
if pool._reset_on_return is reset_rollback:
if self._echo:
pool.logger.debug(
"Connection %s rollback-on-return%s",
self.connection,
", via agent" if self._reset_agent else "",
)
if self._reset_agent:
self._reset_agent.rollback()
else:
pool._dialect.do_rollback(self)
elif pool._reset_on_return is reset_commit:
if self._echo:
pool.logger.debug(
"Connection %s commit-on-return%s",
self.connection,
", via agent" if self._reset_agent else "",
)
if self._reset_agent:
self._reset_agent.commit()
else:
pool._dialect.do_commit(self)
@property
def _logger(self):
return self._pool.logger
@property
def is_valid(self):
"""Return True if this :class:`._ConnectionFairy` still refers
to an active DBAPI connection."""
return self.connection is not None
@util.memoized_property
def info(self):
"""Info dictionary associated with the underlying DBAPI connection
referred to by this :class:`.ConnectionFairy`, allowing user-defined
data to be associated with the connection.
The data here will follow along with the DBAPI connection including
after it is returned to the connection pool and used again
in subsequent instances of :class:`._ConnectionFairy`. It is shared
with the :attr:`._ConnectionRecord.info` and :attr:`.Connection.info`
accessors.
The dictionary associated with a particular DBAPI connection is
discarded when the connection itself is discarded.
"""
return self._connection_record.info
@property
def record_info(self):
"""Info dictionary associated with the :class:`._ConnectionRecord
container referred to by this :class:`.ConnectionFairy`.
Unlike the :attr:`._ConnectionFairy.info` dictionary, the lifespan
of this dictionary is persistent across connections that are
disconnected and/or invalidated within the lifespan of a
:class:`._ConnectionRecord`.
.. versionadded:: 1.1
"""
if self._connection_record:
return self._connection_record.record_info
else:
return None
def invalidate(self, e=None, soft=False):
"""Mark this connection as invalidated.
This method can be called directly, and is also called as a result
of the :meth:`.Connection.invalidate` method. When invoked,
the DBAPI connection is immediately closed and discarded from
further use by the pool. The invalidation mechanism proceeds
via the :meth:`._ConnectionRecord.invalidate` internal method.
:param e: an exception object indicating a reason for the invalidation.
:param soft: if True, the connection isn't closed; instead, this
connection will be recycled on next checkout.
.. versionadded:: 1.0.3
.. seealso::
:ref:`pool_connection_invalidation`
"""
if self.connection is None:
util.warn("Can't invalidate an already-closed connection.")
return
if self._connection_record:
self._connection_record.invalidate(e=e, soft=soft)
if not soft:
self.connection = None
self._checkin()
def cursor(self, *args, **kwargs):
"""Return a new DBAPI cursor for the underlying connection.
This method is a proxy for the ``connection.cursor()`` DBAPI
method.
"""
return self.connection.cursor(*args, **kwargs)
def __getattr__(self, key):
return getattr(self.connection, key)
def detach(self):
"""Separate this connection from its Pool.
This means that the connection will no longer be returned to the
pool when closed, and will instead be literally closed. The
containing ConnectionRecord is separated from the DB-API connection,
and will create a new connection when next used.
Note that any overall connection limiting constraints imposed by a
Pool implementation may be violated after a detach, as the detached
connection is removed from the pool's knowledge and control.
"""
if self._connection_record is not None:
rec = self._connection_record
_refs.remove(rec)
rec.fairy_ref = None
rec.connection = None
# TODO: should this be _return_conn?
self._pool._do_return_conn(self._connection_record)
self.info = self.info.copy()
self._connection_record = None
if self._pool.dispatch.detach:
self._pool.dispatch.detach(self.connection, rec)
def close(self):
self._counter -= 1
if self._counter == 0:
self._checkin()
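# --- Hedged illustration (not part of the original module) -----------------
# Minimal sketch of how the pool options documented in Pool.__init__ are
# typically set from the public API, assuming SQLAlchemy itself and an
# in-memory SQLite database are available.  pool_recycle, pool_pre_ping and
# pool_reset_on_return are the create_engine() spellings of the recycle,
# pre_ping and reset_on_return parameters described above.
if __name__ == "__main__":
    from sqlalchemy import create_engine
    engine = create_engine(
        "sqlite://",
        pool_recycle=3600,                # Pool(recycle=3600)
        pool_pre_ping=True,               # Pool(pre_ping=True)
        pool_reset_on_return="rollback",  # Pool(reset_on_return="rollback")
    )
    conn = engine.connect()  # checks a DBAPI connection out of the pool
    conn.close()             # returns it; the reset-on-return step runs here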
| 34.622
| 84
| 0.610248
|
bb7fa601749d283e8e82f295a0483d616134fa6b
| 18,261
|
py
|
Python
|
nemo/collections/tts/models/hifigan.py
|
mlgill/NeMo
|
078a09a248e57883952960df4eae1039976635bc
|
[
"Apache-2.0"
] | null | null | null |
nemo/collections/tts/models/hifigan.py
|
mlgill/NeMo
|
078a09a248e57883952960df4eae1039976635bc
|
[
"Apache-2.0"
] | null | null | null |
nemo/collections/tts/models/hifigan.py
|
mlgill/NeMo
|
078a09a248e57883952960df4eae1039976635bc
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import torch
import torch.nn.functional as F
from hydra.utils import instantiate
from omegaconf import DictConfig, OmegaConf, open_dict
from pytorch_lightning.loggers.wandb import WandbLogger
from nemo.collections.tts.data.datalayers import MelAudioDataset
from nemo.collections.tts.helpers.helpers import get_batch_size, get_num_workers, plot_spectrogram_to_numpy
from nemo.collections.tts.losses.hifigan_losses import DiscriminatorLoss, FeatureMatchingLoss, GeneratorLoss
from nemo.collections.tts.models.base import Vocoder
from nemo.collections.tts.modules.hifigan_modules import MultiPeriodDiscriminator, MultiScaleDiscriminator
from nemo.core.classes import Exportable
from nemo.core.classes.common import PretrainedModelInfo, typecheck
from nemo.core.neural_types.elements import AudioSignal, MelSpectrogramType
from nemo.core.neural_types.neural_type import NeuralType
from nemo.core.optim.lr_scheduler import CosineAnnealing, compute_max_steps
from nemo.utils import logging
HAVE_WANDB = True
try:
import wandb
except ModuleNotFoundError:
HAVE_WANDB = False
class HifiGanModel(Vocoder, Exportable):
def __init__(self, cfg: DictConfig, trainer: 'Trainer' = None):
if isinstance(cfg, dict):
cfg = OmegaConf.create(cfg)
super().__init__(cfg=cfg, trainer=trainer)
self.audio_to_melspec_precessor = instantiate(cfg.preprocessor)
# use a different melspec extractor because:
# 1. we need to pass grads
        # 2. we need to remove the fmax limitation
self.trg_melspec_fn = instantiate(cfg.preprocessor, highfreq=None, use_grads=True)
self.generator = instantiate(cfg.generator)
self.mpd = MultiPeriodDiscriminator(debug=cfg.debug if "debug" in cfg else False)
self.msd = MultiScaleDiscriminator(debug=cfg.debug if "debug" in cfg else False)
self.feature_loss = FeatureMatchingLoss()
self.discriminator_loss = DiscriminatorLoss()
self.generator_loss = GeneratorLoss()
self.l1_factor = cfg.get("l1_loss_factor", 45)
self.sample_rate = self._cfg.preprocessor.sample_rate
self.stft_bias = None
if self._train_dl and isinstance(self._train_dl.dataset, MelAudioDataset):
self.input_as_mel = True
else:
self.input_as_mel = False
self.automatic_optimization = False
def _get_max_steps(self):
return compute_max_steps(
max_epochs=self._cfg.max_epochs,
accumulate_grad_batches=self.trainer.accumulate_grad_batches,
limit_train_batches=self.trainer.limit_train_batches,
num_workers=get_num_workers(self.trainer),
num_samples=len(self._train_dl.dataset),
batch_size=get_batch_size(self._train_dl),
drop_last=self._train_dl.drop_last,
)
def _get_warmup_steps(self, max_steps):
warmup_steps = self._cfg.sched.get("warmup_steps", None)
warmup_ratio = self._cfg.sched.get("warmup_ratio", None)
if warmup_steps is not None and warmup_ratio is not None:
raise ValueError(f'Either use warmup_steps or warmup_ratio for scheduler')
if warmup_steps is not None:
return warmup_steps
if warmup_ratio is not None:
return warmup_ratio * max_steps
raise ValueError(f'Specify warmup_steps or warmup_ratio for scheduler')
def configure_optimizers(self):
self.optim_g = instantiate(self._cfg.optim, params=self.generator.parameters(),)
self.optim_d = instantiate(
self._cfg.optim, params=itertools.chain(self.msd.parameters(), self.mpd.parameters()),
)
if hasattr(self._cfg, 'sched'):
max_steps = self._cfg.get("max_steps", None)
if max_steps is None or max_steps < 0:
max_steps = self._get_max_steps()
warmup_steps = self._get_warmup_steps(max_steps)
self.scheduler_g = CosineAnnealing(
optimizer=self.optim_g, max_steps=max_steps, min_lr=self._cfg.sched.min_lr, warmup_steps=warmup_steps,
) # Use warmup to delay start
sch1_dict = {
'scheduler': self.scheduler_g,
'interval': 'step',
}
self.scheduler_d = CosineAnnealing(
optimizer=self.optim_d, max_steps=max_steps, min_lr=self._cfg.sched.min_lr,
)
sch2_dict = {
'scheduler': self.scheduler_d,
'interval': 'step',
}
return [self.optim_g, self.optim_d], [sch1_dict, sch2_dict]
else:
return [self.optim_g, self.optim_d]
@property
def input_types(self):
return {
"spec": NeuralType(('B', 'D', 'T'), MelSpectrogramType()),
}
@property
def output_types(self):
return {
"audio": NeuralType(('B', 'S', 'T'), AudioSignal(self.sample_rate)),
}
@typecheck()
def forward(self, *, spec):
"""
Runs the generator, for inputs and outputs see input_types, and output_types
"""
return self.generator(x=spec)
@typecheck(
input_types={"spec": NeuralType(('B', 'C', 'T'), MelSpectrogramType())},
output_types={"audio": NeuralType(('B', 'T'), AudioSignal())},
)
def convert_spectrogram_to_audio(self, spec: 'torch.tensor') -> 'torch.tensor':
return self(spec=spec).squeeze(1)
def training_step(self, batch, batch_idx):
# if in finetune mode the mels are pre-computed using a
# spectrogram generator
if self.input_as_mel:
audio, audio_len, audio_mel = batch
# else, we compute the mel using the ground truth audio
else:
audio, audio_len = batch
# mel as input for generator
audio_mel, _ = self.audio_to_melspec_precessor(audio, audio_len)
# mel as input for L1 mel loss
audio_trg_mel, _ = self.trg_melspec_fn(audio, audio_len)
audio = audio.unsqueeze(1)
audio_pred = self.generator(x=audio_mel)
audio_pred_mel, _ = self.trg_melspec_fn(audio_pred.squeeze(1), audio_len)
# train discriminator
self.optim_d.zero_grad()
mpd_score_real, mpd_score_gen, _, _ = self.mpd(y=audio, y_hat=audio_pred.detach())
loss_disc_mpd, _, _ = self.discriminator_loss(
disc_real_outputs=mpd_score_real, disc_generated_outputs=mpd_score_gen
)
msd_score_real, msd_score_gen, _, _ = self.msd(y=audio, y_hat=audio_pred.detach())
loss_disc_msd, _, _ = self.discriminator_loss(
disc_real_outputs=msd_score_real, disc_generated_outputs=msd_score_gen
)
loss_d = loss_disc_msd + loss_disc_mpd
self.manual_backward(loss_d)
self.optim_d.step()
# train generator
self.optim_g.zero_grad()
loss_mel = F.l1_loss(audio_pred_mel, audio_trg_mel)
_, mpd_score_gen, fmap_mpd_real, fmap_mpd_gen = self.mpd(y=audio, y_hat=audio_pred)
_, msd_score_gen, fmap_msd_real, fmap_msd_gen = self.msd(y=audio, y_hat=audio_pred)
loss_fm_mpd = self.feature_loss(fmap_r=fmap_mpd_real, fmap_g=fmap_mpd_gen)
loss_fm_msd = self.feature_loss(fmap_r=fmap_msd_real, fmap_g=fmap_msd_gen)
loss_gen_mpd, _ = self.generator_loss(disc_outputs=mpd_score_gen)
loss_gen_msd, _ = self.generator_loss(disc_outputs=msd_score_gen)
loss_g = loss_gen_msd + loss_gen_mpd + loss_fm_msd + loss_fm_mpd + loss_mel * self.l1_factor
self.manual_backward(loss_g)
self.optim_g.step()
# run schedulers
schedulers = self.lr_schedulers()
if schedulers is not None:
sch1, sch2 = schedulers
sch1.step()
sch2.step()
metrics = {
"g_loss_fm_mpd": loss_fm_mpd,
"g_loss_fm_msd": loss_fm_msd,
"g_loss_gen_mpd": loss_gen_mpd,
"g_loss_gen_msd": loss_gen_msd,
"g_loss": loss_g,
"d_loss_mpd": loss_disc_mpd,
"d_loss_msd": loss_disc_msd,
"d_loss": loss_d,
"global_step": self.global_step,
"lr": self.optim_g.param_groups[0]['lr'],
}
self.log_dict(metrics, on_step=True, sync_dist=True)
self.log("g_l1_loss", loss_mel, prog_bar=True, logger=False, sync_dist=True)
def validation_step(self, batch, batch_idx):
if self.input_as_mel:
audio, audio_len, audio_mel = batch
audio_mel_len = [audio_mel.shape[1]] * audio_mel.shape[0]
else:
audio, audio_len = batch
audio_mel, audio_mel_len = self.audio_to_melspec_precessor(audio, audio_len)
audio_pred = self(spec=audio_mel)
# perform bias denoising
pred_denoised = self._bias_denoise(audio_pred, audio_mel).squeeze(1)
pred_denoised_mel, _ = self.audio_to_melspec_precessor(pred_denoised, audio_len)
if self.input_as_mel:
gt_mel, gt_mel_len = self.audio_to_melspec_precessor(audio, audio_len)
audio_pred_mel, _ = self.audio_to_melspec_precessor(audio_pred.squeeze(1), audio_len)
loss_mel = F.l1_loss(audio_mel, audio_pred_mel)
self.log_dict({"val_loss": loss_mel}, on_epoch=True, sync_dist=True)
# plot audio once per epoch
if batch_idx == 0 and isinstance(self.logger, WandbLogger) and HAVE_WANDB:
clips = []
specs = []
for i in range(min(5, audio.shape[0])):
clips += [
wandb.Audio(
audio[i, : audio_len[i]].data.cpu().numpy(),
caption=f"real audio {i}",
sample_rate=self.sample_rate,
),
wandb.Audio(
audio_pred[i, 0, : audio_len[i]].data.cpu().numpy().astype('float32'),
caption=f"generated audio {i}",
sample_rate=self.sample_rate,
),
wandb.Audio(
pred_denoised[i, : audio_len[i]].data.cpu().numpy(),
caption=f"denoised audio {i}",
sample_rate=self.sample_rate,
),
]
specs += [
wandb.Image(
plot_spectrogram_to_numpy(audio_mel[i, :, : audio_mel_len[i]].data.cpu().numpy()),
caption=f"input mel {i}",
),
wandb.Image(
plot_spectrogram_to_numpy(audio_pred_mel[i, :, : audio_mel_len[i]].data.cpu().numpy()),
caption=f"output mel {i}",
),
wandb.Image(
plot_spectrogram_to_numpy(pred_denoised_mel[i, :, : audio_mel_len[i]].data.cpu().numpy()),
caption=f"denoised mel {i}",
),
]
if self.input_as_mel:
specs += [
wandb.Image(
plot_spectrogram_to_numpy(gt_mel[i, :, : audio_mel_len[i]].data.cpu().numpy()),
caption=f"gt mel {i}",
),
]
self.logger.experiment.log({"audio": clips, "specs": specs})
def _bias_denoise(self, audio, mel):
def stft(x):
comp = torch.stft(x.squeeze(1), n_fft=1024, hop_length=256, win_length=1024)
real, imag = comp[..., 0], comp[..., 1]
mags = torch.sqrt(real ** 2 + imag ** 2)
phase = torch.atan2(imag, real)
return mags, phase
def istft(mags, phase):
comp = torch.stack([mags * torch.cos(phase), mags * torch.sin(phase)], dim=-1)
x = torch.istft(comp, n_fft=1024, hop_length=256, win_length=1024)
return x
# create bias tensor
if self.stft_bias is None or self.stft_bias.shape[0] != audio.shape[0]:
audio_bias = self(spec=torch.zeros_like(mel, device=mel.device))
self.stft_bias, _ = stft(audio_bias)
self.stft_bias = self.stft_bias[:, :, 0][:, :, None]
audio_mags, audio_phase = stft(audio)
audio_mags = audio_mags - self.cfg.get("denoise_strength", 0.0025) * self.stft_bias
audio_mags = torch.clamp(audio_mags, 0.0)
audio_denoised = istft(audio_mags, audio_phase).unsqueeze(1)
return audio_denoised
def __setup_dataloader_from_config(self, cfg, shuffle_should_be: bool = True, name: str = "train"):
if "dataset" not in cfg or not isinstance(cfg.dataset, DictConfig):
raise ValueError(f"No dataset for {name}")
if "dataloader_params" not in cfg or not isinstance(cfg.dataloader_params, DictConfig):
raise ValueError(f"No dataloder_params for {name}")
if shuffle_should_be:
if 'shuffle' not in cfg.dataloader_params:
logging.warning(
f"Shuffle should be set to True for {self}'s {name} dataloader but was not found in its "
"config. Manually setting to True"
)
with open_dict(cfg["dataloader_params"]):
cfg.dataloader_params.shuffle = True
elif not cfg.dataloader_params.shuffle:
logging.error(f"The {name} dataloader for {self} has shuffle set to False!!!")
elif not shuffle_should_be and cfg.dataloader_params.shuffle:
logging.error(f"The {name} dataloader for {self} has shuffle set to True!!!")
dataset = instantiate(cfg.dataset)
return torch.utils.data.DataLoader(dataset, collate_fn=dataset.collate_fn, **cfg.dataloader_params)
def setup_training_data(self, cfg):
self._train_dl = self.__setup_dataloader_from_config(cfg)
def setup_validation_data(self, cfg):
self._validation_dl = self.__setup_dataloader_from_config(cfg, shuffle_should_be=False, name="validation")
@classmethod
def list_available_models(cls) -> 'Optional[Dict[str, str]]':
list_of_models = []
model = PretrainedModelInfo(
pretrained_model_name="tts_hifigan",
location="https://api.ngc.nvidia.com/v2/models/nvidia/nemo/tts_hifigan/versions/1.0.0rc1/files/tts_hifigan.nemo",
description="This model is trained on LJSpeech audio sampled at 22050Hz and mel spectrograms generated from Tacotron2, TalkNet, and FastPitch. This model has been tested on generating female English voices with an American accent.",
class_=cls,
)
list_of_models.append(model)
model = PretrainedModelInfo(
pretrained_model_name="tts_en_lj_hifigan_ft_mixertts",
location="https://api.ngc.nvidia.com/v2/models/nvidia/nemo/tts_en_lj_hifigan/versions/1.6.0/files/tts_en_lj_hifigan_ft_mixertts.nemo",
description="This model is trained on LJSpeech audio sampled at 22050Hz and mel spectrograms generated from Mixer-TTS. This model has been tested on generating female English voices with an American accent.",
class_=cls,
)
list_of_models.append(model)
model = PretrainedModelInfo(
pretrained_model_name="tts_en_lj_hifigan_ft_mixerttsx",
location="https://api.ngc.nvidia.com/v2/models/nvidia/nemo/tts_en_lj_hifigan/versions/1.6.0/files/tts_en_lj_hifigan_ft_mixerttsx.nemo",
description="This model is trained on LJSpeech audio sampled at 22050Hz and mel spectrograms generated from Mixer-TTS-X. This model has been tested on generating female English voices with an American accent.",
class_=cls,
)
list_of_models.append(model)
return list_of_models
def load_state_dict(self, state_dict, strict=True):
# override load_state_dict to give us some flexibility to be backward-compatible
# with old checkpoints
new_state_dict = {}
num_resblocks = len(self.cfg['generator']['resblock_kernel_sizes'])
for k, v in state_dict.items():
new_k = k
if 'resblocks' in k:
parts = k.split(".")
# only do this is the checkpoint type is older
if len(parts) == 6:
layer = int(parts[2])
new_layer = f"{layer // num_resblocks}.{layer % num_resblocks}"
new_k = f"generator.resblocks.{new_layer}.{'.'.join(parts[3:])}"
new_state_dict[new_k] = v
super().load_state_dict(new_state_dict, strict=strict)
def _prepare_for_export(self, **kwargs):
"""
Override this method to prepare module for export. This is in-place operation.
Base version does common necessary module replacements (Apex etc)
"""
if self.generator is not None:
try:
self.generator.remove_weight_norm()
except ValueError:
return
def input_example(self, max_batch=1, max_dim=256):
"""
Generates input examples for tracing etc.
Returns:
A tuple of input examples.
"""
par = next(self.parameters())
mel = torch.randn((max_batch, self.cfg['preprocessor']['nfilt'], max_dim), device=par.device, dtype=par.dtype)
return ({'spec': mel},)
def forward_for_export(self, spec):
"""
Runs the generator, for inputs and outputs see input_types, and output_types
"""
return self.generator(x=spec)
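# --- Hedged illustration (not part of the original module) -----------------
# Sketch of typical inference with this class, assuming NeMo is installed and
# one of the pretrained checkpoints listed in list_available_models() can be
# downloaded.  The dummy 80-band mel spectrogram stands in for the output of
# a spectrogram generator such as FastPitch or Tacotron2.
if __name__ == "__main__":
    vocoder = HifiGanModel.from_pretrained(model_name="tts_hifigan").eval()
    spec = torch.randn(1, 80, 128)  # (batch, mel bands, frames)
    with torch.no_grad():
        audio = vocoder.convert_spectrogram_to_audio(spec=spec)
    print(audio.shape)  # (batch, samples) waveform at 22050 Hz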
| 44.00241
| 244
| 0.628826
|
e58ff600ac85499ff08e0800e03e0a11db2aa4c3
| 657
|
py
|
Python
|
backend/products/migrations/0001_initial.py
|
mahmoudabuelnaga/drf
|
320526a5b6a9f86bf0b76315a28076d11dfd4966
|
[
"MIT"
] | null | null | null |
backend/products/migrations/0001_initial.py
|
mahmoudabuelnaga/drf
|
320526a5b6a9f86bf0b76315a28076d11dfd4966
|
[
"MIT"
] | null | null | null |
backend/products/migrations/0001_initial.py
|
mahmoudabuelnaga/drf
|
320526a5b6a9f86bf0b76315a28076d11dfd4966
|
[
"MIT"
] | null | null | null |
# Generated by Django 4.0.3 on 2022-03-27 17:19
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Product',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=120)),
('content', models.TextField(blank=True, null=True)),
('price', models.DecimalField(decimal_places=2, default=99.99, max_digits=15)),
],
),
]
| 27.375
| 117
| 0.584475
|
20fe8612c5b16bb526b0d763c1174f10b31f9383
| 5,171
|
py
|
Python
|
todb.py
|
georgetown-analytics/triptomizer
|
3401ec4ff612407e68429eac9a57a08af7fbeaf3
|
[
"MIT"
] | 1
|
2017-02-24T01:57:03.000Z
|
2017-02-24T01:57:03.000Z
|
todb.py
|
georgetown-analytics/triptomizer
|
3401ec4ff612407e68429eac9a57a08af7fbeaf3
|
[
"MIT"
] | null | null | null |
todb.py
|
georgetown-analytics/triptomizer
|
3401ec4ff612407e68429eac9a57a08af7fbeaf3
|
[
"MIT"
] | 5
|
2015-10-03T19:52:14.000Z
|
2020-03-04T01:21:24.000Z
|
#!/usr/bin/env python
##########################################################################
## Imports
##########################################################################
import json #required to read and parse json files
import os #required for file path function
import psycopg2 #required for postgres
import sys #also required for postgres?
##########################################################################
## Module Variables/Constants
##########################################################################
DB = 'triptomizer'
user = 'postgres'
password = 'psqlpass'
host = 'localhost'
port = '5433'
conn_str="host={} port={} dbname={} user={} password={}".format(host, port, DB, user, password)
conn=psycopg2.connect(conn_str)
c=conn.cursor()
#create new tables if necessary
c.execute("CREATE TABLE IF NOT EXISTS drivingdata (id serial PRIMARY KEY, timeoftravel varchar(10), airport varchar(20), distance int, duration int);")
c.execute("CREATE TABLE IF NOT EXISTS flightdata (id serial PRIMARY KEY, airport varchar(3), departuretime varchar(24), arrivaltime varchar(24), duration int, cost varchar(20), tripid varchar(40), airline varchar(24));")
conn.commit()
#this is a table to create lookup table for airline codes and names
c.execute("CREATE TABLE IF NOT EXISTS airlinecodesall (id serial PRIMARY KEY, airlinecode varchar(10), airlinename varchar(40));")
#create paths to driving and flight data files
drivepath = os.path.join(os.getcwd(), 'data')
flightpath = os.getcwd()
#make lists of the files
drive_jsons = [j for j in os.listdir(drivepath) if j.endswith('.json')]
flight_jsons = [j for j in os.listdir(flightpath) if ((j.endswith('DCA.json')) or (j.endswith('IAD.json')) or (j.endswith('BWI.json')))]
#go through each json file, get the data you need, and save it to the db
for j in drive_jsons:
filename = os.path.join(drivepath, j)
with open(filename) as json_file:
data=json.load(json_file)
for d in data:
if d=='route':
distance=data["route"]["distance"]
durationmin=data["route"]["realTime"]
duration=durationmin/60
timeoftravel=data["route"]["options"]["localTime"]
airportpostalcode = data["route"]["locations"][1]["postalCode"]
SQL = "INSERT INTO drivingdata (distance, duration, timeoftravel, airport) VALUES (%s, %s, %s, %s);"
c.execute(SQL, (distance, duration, timeoftravel, airportpostalcode))
conn.commit()
#go through each flight file, get the data you need, and save it to db
for j in flight_jsons:
filename = os.path.join(flightpath,j)
airport=filename[-8:-5]
with open(filename, "rb") as json_file:
data=json.load(json_file)
for d in data:
if d == 'trips':
carriers = data['trips']['data']['carrier']
x=0
for car in carriers:
airlinecode=carriers[x]['code']
airlinename=carriers[x]['name']
SQL = "INSERT INTO airlinecodesall (airlinecode, airlinename) VALUES (%s, %s);"
c.execute(SQL, (airlinecode, airlinename))
conn.commit()
x=x+1
tripOptions = data['trips']['tripOption']
x=0
for t in tripOptions:
tripid=tripOptions[x]["id"]
cost=tripOptions[x]['saleTotal']
duration = tripOptions[x]['slice'][0]['duration']
airline=tripOptions[x]['slice'][0]['segment'][0]['flight']['carrier']
legs=tripOptions[x]['slice'][0]['segment']
for leg in legs:
if ((leg['leg'][0]['origin'])==airport):
departuretime = leg['leg'][0]['departureTime']
if ((leg['leg'][0]['destination'])=="LAX"):
arrivaltime = leg['leg'][0]['arrivalTime']
# put it in the table and save
SQL = "INSERT INTO flightdata (airport, departuretime, arrivaltime, duration, cost, tripid, airline) VALUES (%s, %s, %s, %s, %s, %s, %s);"
c.execute(SQL, (airport, departuretime, arrivaltime, duration, cost, tripid, airline))
conn.commit()
x=x+1
#populate the airline code and name lookup table
#update the values in the driving table to show airport code in place of airport zip code
c.execute("UPDATE drivingdata SET airport=%s WHERE airport=%s", ("IAD", "20166"))
c.execute("UPDATE drivingdata SET airport=%s WHERE airport=%s", ("DCA", "22202"))
c.execute("UPDATE drivingdata SET airport=%s WHERE airport=%s", ("BWI", "21240-2004"))
conn.commit()
#update the values in the flight table to show cost as a Number instead of a string (remove "USD")
c.execute("SELECT * FROM flightdata")
rows=c.fetchall()
for row in rows:
cost_string = row[5]
cost_number = cost_string[3:]
c.execute("UPDATE flightdata SET cost=%s WHERE cost=%s", (cost_number, cost_string))
conn.commit()
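# --- Hedged illustration (not part of the original script) -----------------
# Example follow-up query against the tables populated above: average flight
# duration and average drive duration per airport.  Table and column names
# match the CREATE TABLE statements at the top of this script.
c.execute("""
    SELECT f.airport,
           AVG(f.duration) AS avg_flight_minutes,
           AVG(d.duration) AS avg_drive_minutes
    FROM flightdata f
    JOIN drivingdata d ON d.airport = f.airport
    GROUP BY f.airport;
""")
for airport, avg_flight, avg_drive in c.fetchall():
    print(airport, avg_flight, avg_drive)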
| 44.965217
| 220
| 0.574744
|
148461e4e09dfd461d226c42ed1fb05855ad6fd8
| 1,875
|
py
|
Python
|
board_parse.py
|
Christina0310/ECON900_ps1
|
de44f7dfb91abb23f6c85ae3d6bd3a4f305b43c6
|
[
"MIT"
] | null | null | null |
board_parse.py
|
Christina0310/ECON900_ps1
|
de44f7dfb91abb23f6c85ae3d6bd3a4f305b43c6
|
[
"MIT"
] | null | null | null |
board_parse.py
|
Christina0310/ECON900_ps1
|
de44f7dfb91abb23f6c85ae3d6bd3a4f305b43c6
|
[
"MIT"
] | null | null | null |
from bs4 import BeautifulSoup
import os
import glob
import pandas as pd
if not os.path.exists("parsed_files"):
os.mkdir("parsed_files")
df = pd.DataFrame()
for i in range(1, 161):  # previously: for one_file_name in glob.glob("html_files/*.html")
current_page = str(i)
one_file_name ="html_files/boardgamegeek" + current_page + ".html"
print("parsing " + one_file_name)
f = open(one_file_name, "r")
soup = BeautifulSoup(f.read(), 'html.parser')
f.close()
boardgames_table = soup.find("table", {"id": "collectionitems"})
boardgames_rows = boardgames_table.find_all("tr", {"id":"row_"})
for r in boardgames_rows:
boardgames_name = r.find("td", {"class": "collection_objectname"}).find("div",{"style":"z-index:1000;"}).find("a").text
#boardgames_neg_year = r.find("td", {"class": "collection_objectname"}).find("div",{"style":"z-index:1000;"}).find("a").text[1]
boardgames_ratings= r.find_all("td",{"class":"collection_bggrating"})
boardgames_geek_ratingq= boardgames_ratings[0].text #next(game_iter).text
boardgames_ave_ratingq = boardgames_ratings[1].text
boardgames_num_votersq = boardgames_ratings[2].text
#boardgames_year = boardgames_neg_year[1:5]
boardgames_geek_rating = float(boardgames_geek_ratingq)
boardgames_ave_rating = float(boardgames_ave_ratingq)
boardgames_num_voters = int(boardgames_num_votersq)
df = df.append({
'title': boardgames_name,
#'year': boardgames_year,
'geek_rating': boardgames_geek_rating,
'average_rating': boardgames_ave_rating,
'num_of_voters': boardgames_num_voters
}, ignore_index=True)
df.to_csv("parsed_files/boardgeekgame_dataset.csv")
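# --- Hedged illustration (not part of the original script) -----------------
# Quick sanity check on the parsed output: reload the CSV written above and
# list the ten games with the highest geek rating.
top = pd.read_csv("parsed_files/boardgeekgame_dataset.csv")
top = top.nlargest(10, "geek_rating")[["title", "geek_rating", "num_of_voters"]]
print(top.to_string(index=False))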
| 38.265306
| 139
| 0.638933
|
6f9f5e69c63141793a6c4287d2c87d66c89c2c97
| 6,781
|
py
|
Python
|
salt/cache/mysql_cache.py
|
tschmittni/salt
|
ccfcd5ed1272576799797ec7f259b676fd130585
|
[
"Apache-2.0"
] | 2
|
2018-11-08T02:59:24.000Z
|
2021-01-04T00:30:50.000Z
|
salt/cache/mysql_cache.py
|
The-Loeki/salt
|
8ff8212cc1eacfe409eb9cc017b21250f28dd305
|
[
"Apache-2.0"
] | 4
|
2020-09-04T10:19:34.000Z
|
2020-11-09T12:55:59.000Z
|
salt/cache/mysql_cache.py
|
The-Loeki/salt
|
8ff8212cc1eacfe409eb9cc017b21250f28dd305
|
[
"Apache-2.0"
] | 5
|
2017-06-16T23:48:13.000Z
|
2021-04-08T17:43:48.000Z
|
# -*- coding: utf-8 -*-
'''
Minion data cache plugin for MySQL database.
.. versionadded:: develop
It is up to the system administrator to set up and configure the MySQL
infrastructure. All that is needed for this plugin is a working MySQL server.
The module requires the `salt_cache` database to exist but creates its own
table if needed. The keys are indexed using the `bank` and `etcd_key` columns.
To enable this cache plugin, the master will need the python client for
MySQL installed. This can be easily installed with pip:
.. code-block:: bash
    pip install mysqlclient
Optionally, depending on the MySQL agent configuration, the following values
could be set in the master config. These are the defaults:
.. code-block:: yaml
mysql.host: 127.0.0.1
    mysql.port: 3306
mysql.user: None
mysql.password: None
mysql.database: salt_cache
mysql.table_name: cache
Related docs could be found in the `python-mysql documentation`_.
To use the mysql as a minion data cache backend, set the master ``cache`` config
value to ``mysql``:
.. code-block:: yaml
cache: mysql
.. _`MySQL documentation`: https://github.com/coreos/mysql
.. _`python-mysql documentation`: http://python-mysql.readthedocs.io/en/latest/
'''
from __future__ import absolute_import, print_function, unicode_literals
from time import sleep
import logging
try:
import MySQLdb
HAS_MYSQL = True
except ImportError:
HAS_MYSQL = False
from salt.exceptions import SaltCacheError
_DEFAULT_DATABASE_NAME = "salt_cache"
_DEFAULT_CACHE_TABLE_NAME = "cache"
_RECONNECT_INTERVAL_SEC = 0.050
log = logging.getLogger(__name__)
client = None
_mysql_kwargs = None
_table_name = None
# Module properties
__virtualname__ = 'mysql'
__func_alias__ = {'ls': 'list'}
def __virtual__():
'''
Confirm that python-mysql package is installed.
'''
if not HAS_MYSQL:
return (False, "Please install python-mysql package to use mysql data "
"cache driver")
return __virtualname__
def run_query(conn, query, retries=3):
'''
Get a cursor and run a query. Reconnect up to `retries` times if
needed.
Returns: cursor, affected rows counter
Raises: SaltCacheError, AttributeError, MySQLdb.OperationalError
'''
try:
cur = conn.cursor()
out = cur.execute(query)
return cur, out
except (AttributeError, MySQLdb.OperationalError) as e:
if retries == 0:
raise
# reconnect creating new client
sleep(_RECONNECT_INTERVAL_SEC)
if conn is None:
log.debug("mysql_cache: creating db connection")
else:
log.info("mysql_cache: recreating db connection due to: %r", e)
global client
client = MySQLdb.connect(**_mysql_kwargs)
return run_query(client, query, retries - 1)
except Exception as e:
if len(query) > 150:
query = query[:150] + "<...>"
raise SaltCacheError("Error running {0}: {1}".format(query, e))
def _create_table():
'''
Create table if needed
'''
    # Explicitly check if the table already exists as the library logs a
# warning on CREATE TABLE
query = """SELECT COUNT(TABLE_NAME) FROM information_schema.tables
WHERE table_schema = '{0}' AND table_name = '{1}'""".format(
_mysql_kwargs['db'],
_table_name,
)
cur, _ = run_query(client, query)
r = cur.fetchone()
cur.close()
if r[0] == 1:
return
query = """CREATE TABLE IF NOT EXISTS {0} (
bank CHAR(255),
etcd_key CHAR(255),
data MEDIUMBLOB,
PRIMARY KEY(bank, etcd_key)
);""".format(_table_name)
log.info("mysql_cache: creating table %s", _table_name)
cur, _ = run_query(client, query)
cur.close()
def _init_client():
"""Initialize connection and create table if needed
"""
if client is not None:
return
global _mysql_kwargs, _table_name
_mysql_kwargs = {
'host': __opts__.get('mysql.host', '127.0.0.1'),
'user': __opts__.get('mysql.user', None),
'passwd': __opts__.get('mysql.password', None),
'db': __opts__.get('mysql.database', _DEFAULT_DATABASE_NAME),
'port': __opts__.get('mysql.port', 3306),
'unix_socket': __opts__.get('mysql.unix_socket', None),
'connect_timeout': __opts__.get('mysql.connect_timeout', None),
'autocommit': True,
}
    _table_name = __opts__.get('mysql.table_name', _DEFAULT_CACHE_TABLE_NAME)
# TODO: handle SSL connection parameters
    # Copy the items so unset options can be dropped without mutating the dict
    # while iterating over it (a RuntimeError on Python 3).
    for k, v in list(_mysql_kwargs.items()):
        if v is None:
            _mysql_kwargs.pop(k)
kwargs_copy = _mysql_kwargs.copy()
kwargs_copy['passwd'] = "<hidden>"
log.info("mysql_cache: Setting up client with params: %r", kwargs_copy)
# The MySQL client is created later on by run_query
_create_table()
def store(bank, key, data):
'''
Store a key value.
'''
_init_client()
data = __context__['serial'].dumps(data)
query = b"REPLACE INTO {0} (bank, etcd_key, data) values('{1}', '{2}', " \
b"'{3}')".format(_table_name,
bank,
key,
data)
cur, cnt = run_query(client, query)
cur.close()
if cnt not in (1, 2):
raise SaltCacheError(
'Error storing {0} {1} returned {2}'.format(bank, key, cnt)
)
def fetch(bank, key):
'''
Fetch a key value.
'''
_init_client()
query = "SELECT data FROM {0} WHERE bank='{1}' AND etcd_key='{2}'".format(
_table_name, bank, key)
cur, _ = run_query(client, query)
r = cur.fetchone()
cur.close()
if r is None:
return {}
return __context__['serial'].loads(r[0])
def flush(bank, key=None):
'''
Remove the key from the cache bank with all the key content.
'''
_init_client()
query = "DELETE FROM {0} WHERE bank='{1}'".format(_table_name, bank)
if key is not None:
query += " AND etcd_key='{0}'".format(key)
cur, _ = run_query(client, query)
cur.close()
def ls(bank):
'''
Return an iterable object containing all entries stored in the specified
bank.
'''
_init_client()
query = "SELECT etcd_key FROM {0} WHERE bank='{1}'".format(
_table_name, bank)
cur, _ = run_query(client, query)
out = [row[0] for row in cur.fetchall()]
cur.close()
return out
def contains(bank, key):
'''
Checks if the specified bank contains the specified key.
'''
_init_client()
query = "SELECT COUNT(data) FROM {0} WHERE bank='{1}' " \
"AND etcd_key='{2}'".format(_table_name, bank, key)
cur, _ = run_query(client, query)
r = cur.fetchone()
cur.close()
return r[0] == 1
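# Editorial usage sketch (not part of the Salt module; assumes a reachable MySQL
# server and that the salt loader has populated __opts__ and __context__):
#
#   store('minions/minion1', 'data', {'grains': {'os': 'Linux'}})  # upsert one key
#   fetch('minions/minion1', 'data')      # -> {'grains': {'os': 'Linux'}}
#   contains('minions/minion1', 'data')   # -> True
#   ls('minions/minion1')                 # -> ['data']
#   flush('minions/minion1', key='data')  # delete a single key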
| 28.020661
| 80
| 0.633977
|
cc148316594c8d23db47c3224a49c737dae893a3
| 1,430
|
py
|
Python
|
www/models.py
|
wolfworld/python-webapp
|
dcc57632b59bb3c5517f157c484702f17f29179a
|
[
"Apache-2.0"
] | 2
|
2017-07-28T02:27:37.000Z
|
2017-07-31T08:43:05.000Z
|
www/models.py
|
wolfworld/python-webapp
|
dcc57632b59bb3c5517f157c484702f17f29179a
|
[
"Apache-2.0"
] | null | null | null |
www/models.py
|
wolfworld/python-webapp
|
dcc57632b59bb3c5517f157c484702f17f29179a
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import time,uuid
from www.static.orm import Model,IntegerField,StringField,BooleanField,FloatField,TextField
def next_id():
return '%015d%s000' % (int(time.time() * 1000), uuid.uuid4().hex)
# User model
class User(Model):
    __table__ = "users"
    # Pass next_id itself (not next_id()) so a fresh id is generated per row,
    # matching the callable defaults used elsewhere in this module.
    id = StringField(primary_key=True, default=next_id, ddl='varchar(50)')
    name = StringField(ddl='varchar(50)')
    email = StringField(ddl='varchar(50)')
    passwd = StringField(ddl='varchar(50)')
    admin = BooleanField()
    image = StringField(ddl='varchar(500)')
    created_at = FloatField(default=time.time)
# Blog model
class Blogs(Model):
    __table__ = "blogs"
    id = StringField(primary_key=True, default=next_id, ddl='varchar(50)')
    user_id = StringField(ddl='varchar(50)')
    user_name = StringField(ddl='varchar(50)')
    user_image = StringField(ddl='varchar(500)')
name = StringField()
summary = StringField(ddl='varchar(200)')
content = TextField()
created_at = FloatField(default=time.time)
# Comment model
class Comment(Model):
    __table__ = "comments"
    id = StringField(primary_key=True, default=next_id, ddl='varchar(50)')
    blog_id = StringField(ddl='varchar(50)')
user_id = StringField(ddl='varchar(50)')
user_name = StringField(ddl='varchar(50)')
user_image = StringField(ddl='varchar(500)')
content = TextField()
created_at = FloatField(default=time.time)
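# Editorial sketch (not part of the original models): next_id() returns a 15-digit,
# zero-padded millisecond timestamp followed by a 32-character uuid4 hex and '000',
# 50 characters in total, which is why the id columns above are declared varchar(50).
_example_id = next_id()
assert len(_example_id) == 50 and _example_id.endswith('000')
del _example_id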
| 33.255814
| 91
| 0.686014
|
9b4887df5c411afa5461063f1a769d68318bff1a
| 31,496
|
py
|
Python
|
plaso/parsers/mac_keychain.py
|
pyllyukko/plaso
|
7533db2d1035ca71d264d6281ebd5db2d073c587
|
[
"Apache-2.0"
] | 2
|
2019-10-23T03:37:59.000Z
|
2020-08-14T17:09:26.000Z
|
plaso/parsers/mac_keychain.py
|
pyllyukko/plaso
|
7533db2d1035ca71d264d6281ebd5db2d073c587
|
[
"Apache-2.0"
] | null | null | null |
plaso/parsers/mac_keychain.py
|
pyllyukko/plaso
|
7533db2d1035ca71d264d6281ebd5db2d073c587
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Parser for MacOS keychain database files."""
import codecs
import collections
from dfdatetime import time_elements as dfdatetime_time_elements
from dtfabric.runtime import data_maps as dtfabric_data_maps
from plaso.containers import events
from plaso.containers import time_events
from plaso.lib import errors
from plaso.lib import specification
from plaso.lib import definitions
from plaso.parsers import dtfabric_parser
from plaso.parsers import manager
class KeychainInternetRecordEventData(events.EventData):
"""MacOS keychain internet record event data.
Attributes:
account_name (str): name of the account.
comments (str): comments added by the user.
entry_name (str): name of the entry.
protocol (str): internet protocol used, for example "https".
ssgp_hash (str): password/certificate hash formatted as an hexadecimal
string.
text_description (str): description.
type_protocol (str): sub-protocol used, for example "form".
where (str): domain name or IP where the password is used.
"""
DATA_TYPE = 'mac:keychain:internet'
def __init__(self):
"""Initializes event data."""
super(KeychainInternetRecordEventData, self).__init__(
data_type=self.DATA_TYPE)
self.account_name = None
self.comments = None
self.entry_name = None
self.protocol = None
self.ssgp_hash = None
self.text_description = None
self.type_protocol = None
self.where = None
# TODO: merge with KeychainInternetRecordEventData.
class KeychainApplicationRecordEventData(events.EventData):
"""MacOS keychain application password record event data.
Attributes:
account_name (str): name of the account.
comments (str): comments added by the user.
entry_name (str): name of the entry.
ssgp_hash (str): password/certificate hash formatted as an hexadecimal
string.
text_description (str): description.
"""
DATA_TYPE = 'mac:keychain:application'
def __init__(self):
"""Initializes event data."""
super(KeychainApplicationRecordEventData, self).__init__(
data_type=self.DATA_TYPE)
self.account_name = None
self.comments = None
self.entry_name = None
self.ssgp_hash = None
self.text_description = None
class KeychainDatabaseColumn(object):
"""MacOS keychain database column.
Attributes:
attribute_data_type (int): attribute (data) type.
attribute_identifier (int): attribute identifier.
attribute_name (str): attribute name.
"""
def __init__(self):
"""Initializes a MacOS keychain database column."""
super(KeychainDatabaseColumn, self).__init__()
self.attribute_data_type = None
self.attribute_identifier = None
self.attribute_name = None
class KeychainDatabaseTable(object):
"""MacOS keychain database table.
Attributes:
columns (list[KeychainDatabaseColumn]): columns.
records (list[dict[str, str]]): records.
relation_identifier (int): relation identifier.
relation_name (str): relation name.
"""
def __init__(self):
"""Initializes a MacOS keychain database table."""
super(KeychainDatabaseTable, self).__init__()
self.columns = []
self.records = []
self.relation_identifier = None
self.relation_name = None
class KeychainParser(dtfabric_parser.DtFabricBaseParser):
"""Parser for MacOS keychain database files."""
NAME = 'mac_keychain'
DATA_FORMAT = 'MacOS keychain database file'
_DEFINITION_FILE = 'mac_keychain.yaml'
_MAJOR_VERSION = 1
_MINOR_VERSION = 0
# TODO: add more protocols.
_PROTOCOL_TRANSLATION_DICT = {
'htps': 'https',
'smtp': 'smtp',
'imap': 'imap',
'http': 'http'}
_RECORD_TYPE_CSSM_DL_DB_SCHEMA_INFO = 0x00000000
_RECORD_TYPE_CSSM_DL_DB_SCHEMA_INDEXES = 0x00000001
_RECORD_TYPE_CSSM_DL_DB_SCHEMA_ATTRIBUTES = 0x00000002
_RECORD_TYPE_APPLICATION_PASSWORD = 0x80000000
_RECORD_TYPE_INTERNET_PASSWORD = 0x80000001
_ATTRIBUTE_DATA_READ_FUNCTIONS = {
0: '_ReadAttributeValueString',
1: '_ReadAttributeValueInteger',
2: '_ReadAttributeValueInteger',
5: '_ReadAttributeValueDateTime',
6: '_ReadAttributeValueBinaryData'}
def _ReadAttributeValueBinaryData(
self, attribute_values_data, record_offset, attribute_values_data_offset,
attribute_value_offset):
"""Reads a binary data attribute value.
Args:
attribute_values_data (bytes): attribute values data.
record_offset (int): offset of the record relative to the start of
the file.
attribute_values_data_offset (int): offset of the attribute values data
relative to the start of the record.
attribute_value_offset (int): offset of the attribute relative to
the start of the record.
Returns:
bytes: binary data value or None if attribute value offset is not set.
Raises:
ParseError: if the attribute value cannot be read.
"""
if attribute_value_offset == 0:
return None
data_type_map = self._GetDataTypeMap('keychain_blob')
file_offset = (
record_offset + attribute_values_data_offset + attribute_value_offset)
attribute_value_offset -= attribute_values_data_offset + 1
attribute_value_data = attribute_values_data[attribute_value_offset:]
try:
string_attribute_value = self._ReadStructureFromByteStream(
attribute_value_data, file_offset, data_type_map)
except (ValueError, errors.ParseError) as exception:
raise errors.ParseError((
'Unable to map binary data attribute value data at offset: 0x{0:08x} '
'with error: {1!s}').format(file_offset, exception))
return string_attribute_value.blob
def _ReadAttributeValueDateTime(
self, attribute_values_data, record_offset, attribute_values_data_offset,
attribute_value_offset):
"""Reads a date time attribute value.
Args:
attribute_values_data (bytes): attribute values data.
record_offset (int): offset of the record relative to the start of
the file.
attribute_values_data_offset (int): offset of the attribute values data
relative to the start of the record.
attribute_value_offset (int): offset of the attribute relative to
the start of the record.
Returns:
str: date and time values.
Raises:
ParseError: if the attribute value cannot be read.
"""
if attribute_value_offset == 0:
return None
data_type_map = self._GetDataTypeMap('keychain_date_time')
file_offset = (
record_offset + attribute_values_data_offset + attribute_value_offset)
attribute_value_offset -= attribute_values_data_offset + 1
attribute_value_data = attribute_values_data[attribute_value_offset:]
try:
date_time_attribute_value = self._ReadStructureFromByteStream(
attribute_value_data, file_offset, data_type_map)
except (ValueError, errors.ParseError) as exception:
raise errors.ParseError((
'Unable to map date time attribute value data at offset: 0x{0:08x} '
'with error: {1!s}').format(file_offset, exception))
return date_time_attribute_value.date_time.rstrip('\x00')
def _ReadAttributeValueInteger(
self, attribute_values_data, record_offset, attribute_values_data_offset,
attribute_value_offset):
"""Reads an integer attribute value.
Args:
attribute_values_data (bytes): attribute values data.
record_offset (int): offset of the record relative to the start of
the file.
attribute_values_data_offset (int): offset of the attribute values data
relative to the start of the record.
attribute_value_offset (int): offset of the attribute relative to
the start of the record.
Returns:
int: integer value or None if attribute value offset is not set.
Raises:
ParseError: if the attribute value cannot be read.
"""
if attribute_value_offset == 0:
return None
data_type_map = self._GetDataTypeMap('uint32be')
file_offset = (
record_offset + attribute_values_data_offset + attribute_value_offset)
attribute_value_offset -= attribute_values_data_offset + 1
attribute_value_data = attribute_values_data[attribute_value_offset:]
try:
return self._ReadStructureFromByteStream(
attribute_value_data, file_offset, data_type_map)
except (ValueError, errors.ParseError) as exception:
raise errors.ParseError((
'Unable to map integer attribute value data at offset: 0x{0:08x} '
'with error: {1!s}').format(file_offset, exception))
def _ReadAttributeValueString(
self, attribute_values_data, record_offset, attribute_values_data_offset,
attribute_value_offset):
"""Reads a string attribute value.
Args:
attribute_values_data (bytes): attribute values data.
record_offset (int): offset of the record relative to the start of
the file.
attribute_values_data_offset (int): offset of the attribute values data
relative to the start of the record.
attribute_value_offset (int): offset of the attribute relative to
the start of the record.
Returns:
str: string value or None if attribute value offset is not set.
Raises:
ParseError: if the attribute value cannot be read.
"""
if attribute_value_offset == 0:
return None
data_type_map = self._GetDataTypeMap('keychain_string')
file_offset = (
record_offset + attribute_values_data_offset + attribute_value_offset)
attribute_value_offset -= attribute_values_data_offset + 1
attribute_value_data = attribute_values_data[attribute_value_offset:]
try:
string_attribute_value = self._ReadStructureFromByteStream(
attribute_value_data, file_offset, data_type_map)
except (ValueError, errors.ParseError) as exception:
raise errors.ParseError((
'Unable to map string attribute value data at offset: 0x{0:08x} '
'with error: {1!s}').format(file_offset, exception))
return string_attribute_value.string
def _ReadFileHeader(self, file_object):
"""Reads the file header.
Args:
file_object (file): file-like object.
Returns:
keychain_file_header: file header.
Raises:
ParseError: if the file header cannot be read.
"""
data_type_map = self._GetDataTypeMap('keychain_file_header')
file_header, _ = self._ReadStructureFromFileObject(
file_object, 0, data_type_map)
if (file_header.major_format_version != self._MAJOR_VERSION or
file_header.minor_format_version != self._MINOR_VERSION):
raise errors.ParseError('Unsupported format version: {0:s}.{1:s}'.format(
file_header.major_format_version, file_header.minor_format_version))
return file_header
def _ReadRecord(self, tables, file_object, record_offset, record_type):
"""Reads the record.
Args:
tables (dict[int, KeychainDatabaseTable]): tables per identifier.
file_object (file): file-like object.
record_offset (int): offset of the record relative to the start of
the file.
record_type (int): record type, which should correspond to a relation
identifier of a table defined in the schema.
Raises:
ParseError: if the record cannot be read.
"""
table = tables.get(record_type, None)
if not table:
raise errors.ParseError(
          'Missing table for relation identifier: 0x{0:08x}'.format(record_type))
record_header = self._ReadRecordHeader(file_object, record_offset)
record = collections.OrderedDict()
if table.columns:
attribute_value_offsets = self._ReadRecordAttributeValueOffset(
file_object, record_offset + 24, len(table.columns))
file_offset = file_object.tell()
record_data_offset = file_offset - record_offset
record_data_size = record_header.data_size - (file_offset - record_offset)
record_data = file_object.read(record_data_size)
if record_header.key_data_size > 0:
record['_key_'] = record_data[:record_header.key_data_size]
if table.columns:
for index, column in enumerate(table.columns):
attribute_data_read_function = self._ATTRIBUTE_DATA_READ_FUNCTIONS.get(
column.attribute_data_type, None)
if attribute_data_read_function:
attribute_data_read_function = getattr(
self, attribute_data_read_function, None)
if not attribute_data_read_function:
attribute_value = None
else:
attribute_value = attribute_data_read_function(
record_data, record_offset, record_data_offset,
attribute_value_offsets[index])
record[column.attribute_name] = attribute_value
table.records.append(record)
def _ReadRecordAttributeValueOffset(
self, file_object, file_offset, number_of_attribute_values):
"""Reads the record attribute value offsets.
Args:
file_object (file): file-like object.
file_offset (int): offset of the record attribute values offsets relative
to the start of the file.
number_of_attribute_values (int): number of attribute values.
Returns:
keychain_record_attribute_value_offsets: record attribute value offsets.
Raises:
ParseError: if the record attribute value offsets cannot be read.
"""
offsets_data_size = number_of_attribute_values * 4
offsets_data = file_object.read(offsets_data_size)
context = dtfabric_data_maps.DataTypeMapContext(values={
'number_of_attribute_values': number_of_attribute_values})
data_type_map = self._GetDataTypeMap(
'keychain_record_attribute_value_offsets')
try:
attribute_value_offsets = self._ReadStructureFromByteStream(
offsets_data, file_offset, data_type_map, context=context)
except (ValueError, errors.ParseError) as exception:
raise errors.ParseError((
'Unable to map record attribute value offsets data at offset: '
'0x{0:08x} with error: {1!s}').format(file_offset, exception))
return attribute_value_offsets
def _ReadRecordHeader(self, file_object, record_header_offset):
"""Reads the record header.
Args:
file_object (file): file-like object.
record_header_offset (int): offset of the record header relative to
the start of the file.
Returns:
keychain_record_header: record header.
Raises:
ParseError: if the record header cannot be read.
"""
data_type_map = self._GetDataTypeMap('keychain_record_header')
record_header, _ = self._ReadStructureFromFileObject(
file_object, record_header_offset, data_type_map)
return record_header
def _ReadRecordSchemaAttributes(self, tables, file_object, record_offset):
"""Reads a schema attributes (CSSM_DL_DB_SCHEMA_ATTRIBUTES) record.
Args:
tables (dict[int, KeychainDatabaseTable]): tables per identifier.
file_object (file): file-like object.
record_offset (int): offset of the record relative to the start of
the file.
Raises:
ParseError: if the record cannot be read.
"""
record_header = self._ReadRecordHeader(file_object, record_offset)
attribute_value_offsets = self._ReadRecordAttributeValueOffset(
file_object, record_offset + 24, 6)
file_offset = file_object.tell()
attribute_values_data_offset = file_offset - record_offset
attribute_values_data_size = record_header.data_size - (
file_offset - record_offset)
attribute_values_data = file_object.read(attribute_values_data_size)
relation_identifier = self._ReadAttributeValueInteger(
attribute_values_data, record_offset, attribute_values_data_offset,
attribute_value_offsets[0])
attribute_identifier = self._ReadAttributeValueInteger(
attribute_values_data, record_offset, attribute_values_data_offset,
attribute_value_offsets[1])
attribute_name_data_type = self._ReadAttributeValueInteger(
attribute_values_data, record_offset, attribute_values_data_offset,
attribute_value_offsets[2])
attribute_name = self._ReadAttributeValueString(
attribute_values_data, record_offset, attribute_values_data_offset,
attribute_value_offsets[3])
# TODO: handle attribute_value_offsets[4]
attribute_data_type = self._ReadAttributeValueInteger(
attribute_values_data, record_offset, attribute_values_data_offset,
attribute_value_offsets[5])
table = tables.get(relation_identifier, None)
if not table:
raise errors.ParseError(
          'Missing table for relation identifier: 0x{0:08x}'.format(
relation_identifier))
if attribute_name is None and attribute_value_offsets[1] != 0:
attribute_value_offset = attribute_value_offsets[1]
attribute_value_offset -= attribute_values_data_offset + 1
attribute_name = attribute_values_data[
attribute_value_offset:attribute_value_offset + 4]
attribute_name = attribute_name.decode('ascii')
column = KeychainDatabaseColumn()
column.attribute_data_type = attribute_data_type
column.attribute_identifier = attribute_identifier
column.attribute_name = attribute_name
table.columns.append(column)
table = tables.get(self._RECORD_TYPE_CSSM_DL_DB_SCHEMA_ATTRIBUTES, None)
if not table:
raise errors.ParseError('Missing CSSM_DL_DB_SCHEMA_ATTRIBUTES table.')
record = collections.OrderedDict({
'RelationID': relation_identifier,
'AttributeID': attribute_identifier,
'AttributeNameFormat': attribute_name_data_type,
'AttributeName': attribute_name,
'AttributeFormat': attribute_data_type})
table.records.append(record)
def _ReadRecordSchemaIndexes(self, tables, file_object, record_offset):
"""Reads a schema indexes (CSSM_DL_DB_SCHEMA_INDEXES) record.
Args:
tables (dict[int, KeychainDatabaseTable]): tables per identifier.
file_object (file): file-like object.
record_offset (int): offset of the record relative to the start of
the file.
Raises:
ParseError: if the record cannot be read.
"""
_ = self._ReadRecordHeader(file_object, record_offset)
attribute_value_offsets = self._ReadRecordAttributeValueOffset(
file_object, record_offset + 24, 5)
if attribute_value_offsets != (0x2d, 0x31, 0x35, 0x39, 0x3d):
raise errors.ParseError('Unsupported record attribute value offsets')
file_offset = file_object.tell()
data_type_map = self._GetDataTypeMap('keychain_record_schema_indexes')
record_values, _ = self._ReadStructureFromFileObject(
file_object, file_offset, data_type_map)
if record_values.relation_identifier not in tables:
raise errors.ParseError(
'CSSM_DL_DB_SCHEMA_INDEXES defines relation identifier not defined '
'in CSSM_DL_DB_SCHEMA_INFO.')
table = tables.get(self._RECORD_TYPE_CSSM_DL_DB_SCHEMA_INDEXES, None)
if not table:
raise errors.ParseError('Missing CSSM_DL_DB_SCHEMA_INDEXES table.')
record = collections.OrderedDict({
'RelationID': record_values.relation_identifier,
'IndexID': record_values.index_identifier,
'AttributeID': record_values.attribute_identifier,
'IndexType': record_values.index_type,
'IndexedDataLocation': record_values.index_data_location})
table.records.append(record)
def _ReadRecordSchemaInformation(self, tables, file_object, record_offset):
"""Reads a schema information (CSSM_DL_DB_SCHEMA_INFO) record.
Args:
tables (dict[int, KeychainDatabaseTable]): tables per identifier.
file_object (file): file-like object.
record_offset (int): offset of the record relative to the start of
the file.
Raises:
ParseError: if the record cannot be read.
"""
_ = self._ReadRecordHeader(file_object, record_offset)
attribute_value_offsets = self._ReadRecordAttributeValueOffset(
file_object, record_offset + 24, 2)
if attribute_value_offsets != (0x21, 0x25):
raise errors.ParseError('Unsupported record attribute value offsets')
file_offset = file_object.tell()
data_type_map = self._GetDataTypeMap('keychain_record_schema_information')
record_values, _ = self._ReadStructureFromFileObject(
file_object, file_offset, data_type_map)
relation_name = record_values.relation_name.decode('ascii')
table = KeychainDatabaseTable()
table.relation_identifier = record_values.relation_identifier
table.relation_name = relation_name
tables[table.relation_identifier] = table
table = tables.get(self._RECORD_TYPE_CSSM_DL_DB_SCHEMA_INFO, None)
if not table:
raise errors.ParseError('Missing CSSM_DL_DB_SCHEMA_INFO table.')
record = collections.OrderedDict({
'RelationID': record_values.relation_identifier,
'RelationName': relation_name})
table.records.append(record)
def _ReadTable(self, tables, file_object, table_offset):
"""Reads the table.
Args:
tables (dict[int, KeychainDatabaseTable]): tables per identifier.
file_object (file): file-like object.
table_offset (int): offset of the table relative to the start of
the file.
Raises:
ParseError: if the table cannot be read.
"""
table_header = self._ReadTableHeader(file_object, table_offset)
for record_offset in table_header.record_offsets:
if record_offset == 0:
continue
record_offset += table_offset
if table_header.record_type == self._RECORD_TYPE_CSSM_DL_DB_SCHEMA_INFO:
self._ReadRecordSchemaInformation(tables, file_object, record_offset)
elif table_header.record_type == (
self._RECORD_TYPE_CSSM_DL_DB_SCHEMA_INDEXES):
self._ReadRecordSchemaIndexes(tables, file_object, record_offset)
elif table_header.record_type == (
self._RECORD_TYPE_CSSM_DL_DB_SCHEMA_ATTRIBUTES):
self._ReadRecordSchemaAttributes(tables, file_object, record_offset)
else:
self._ReadRecord(
tables, file_object, record_offset, table_header.record_type)
def _ReadTableHeader(self, file_object, table_header_offset):
"""Reads the table header.
Args:
file_object (file): file-like object.
table_header_offset (int): offset of the tables header relative to
the start of the file.
Returns:
keychain_table_header: table header.
Raises:
ParseError: if the table header cannot be read.
"""
data_type_map = self._GetDataTypeMap('keychain_table_header')
table_header, _ = self._ReadStructureFromFileObject(
file_object, table_header_offset, data_type_map)
return table_header
def _ReadTablesArray(self, file_object, tables_array_offset):
"""Reads the tables array.
Args:
file_object (file): file-like object.
tables_array_offset (int): offset of the tables array relative to
the start of the file.
Returns:
dict[int, KeychainDatabaseTable]: tables per identifier.
Raises:
ParseError: if the tables array cannot be read.
"""
# TODO: implement https://github.com/libyal/dtfabric/issues/12 and update
# keychain_tables_array definition.
data_type_map = self._GetDataTypeMap('keychain_tables_array')
tables_array, _ = self._ReadStructureFromFileObject(
file_object, tables_array_offset, data_type_map)
tables = collections.OrderedDict()
for table_offset in tables_array.table_offsets:
self._ReadTable(tables, file_object, tables_array_offset + table_offset)
return tables
def _ParseDateTimeValue(self, parser_mediator, date_time_value):
"""Parses a date time value.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
date_time_value (str): date time value
(CSSM_DB_ATTRIBUTE_FORMAT_TIME_DATE) in the format: "YYYYMMDDhhmmssZ".
Returns:
dfdatetime.TimeElements: date and time extracted from the value or None
if the value does not represent a valid string.
"""
    if len(date_time_value) < 15 or date_time_value[14] != 'Z':
parser_mediator.ProduceExtractionWarning(
'invalid date and time value: {0!s}'.format(date_time_value))
return None
try:
year = int(date_time_value[0:4], 10)
month = int(date_time_value[4:6], 10)
day_of_month = int(date_time_value[6:8], 10)
hours = int(date_time_value[8:10], 10)
minutes = int(date_time_value[10:12], 10)
seconds = int(date_time_value[12:14], 10)
except (TypeError, ValueError):
parser_mediator.ProduceExtractionWarning(
'invalid date and time value: {0!s}'.format(date_time_value))
return None
time_elements_tuple = (year, month, day_of_month, hours, minutes, seconds)
try:
return dfdatetime_time_elements.TimeElements(
time_elements_tuple=time_elements_tuple)
except ValueError:
parser_mediator.ProduceExtractionWarning(
'invalid date and time value: {0!s}'.format(date_time_value))
return None
def _ParseBinaryDataAsString(self, parser_mediator, binary_data_value):
"""Parses a binary data value as string
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
binary_data_value (bytes): binary data value
(CSSM_DB_ATTRIBUTE_FORMAT_BLOB)
Returns:
str: binary data value formatted as a string or None if no string could
be extracted or binary data value is None (NULL).
"""
if not binary_data_value:
return None
try:
return binary_data_value.decode('utf-8')
except UnicodeDecodeError:
parser_mediator.ProduceExtractionWarning(
'invalid binary data string value: {0:s}'.format(
repr(binary_data_value)))
return None
def _ParseApplicationPasswordRecord(self, parser_mediator, record):
"""Extracts the information from an application password record.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
record (dict[str, object]): database record.
Raises:
ParseError: if Internet password record cannot be parsed.
"""
key = record.get('_key_', None)
if not key or not key.startswith(b'ssgp'):
raise errors.ParseError((
'Unsupported application password record key value does not start '
'with: "ssgp".'))
event_data = KeychainApplicationRecordEventData()
event_data.account_name = self._ParseBinaryDataAsString(
parser_mediator, record['acct'])
event_data.comments = self._ParseBinaryDataAsString(
parser_mediator, record['crtr'])
event_data.entry_name = self._ParseBinaryDataAsString(
parser_mediator, record['PrintName'])
ssgp_hash = codecs.encode(key[4:], 'hex')
event_data.ssgp_hash = codecs.decode(ssgp_hash, 'utf-8')
event_data.text_description = self._ParseBinaryDataAsString(
parser_mediator, record['desc'])
date_time = self._ParseDateTimeValue(parser_mediator, record['cdat'])
if date_time:
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_CREATION)
parser_mediator.ProduceEventWithEventData(event, event_data)
date_time = self._ParseDateTimeValue(parser_mediator, record['mdat'])
if date_time:
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
parser_mediator.ProduceEventWithEventData(event, event_data)
def _ParseInternetPasswordRecord(self, parser_mediator, record):
"""Extracts the information from an Internet password record.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
record (dict[str, object]): database record.
Raises:
ParseError: if Internet password record cannot be parsed.
"""
key = record.get('_key_', None)
if not key or not key.startswith(b'ssgp'):
raise errors.ParseError((
'Unsupported Internet password record key value does not start '
'with: "ssgp".'))
protocol_string = codecs.decode('{0:08x}'.format(record['ptcl']), 'hex')
protocol_string = codecs.decode(protocol_string, 'utf-8')
event_data = KeychainInternetRecordEventData()
event_data.account_name = self._ParseBinaryDataAsString(
parser_mediator, record['acct'])
event_data.comments = self._ParseBinaryDataAsString(
parser_mediator, record['crtr'])
event_data.entry_name = self._ParseBinaryDataAsString(
parser_mediator, record['PrintName'])
event_data.protocol = self._PROTOCOL_TRANSLATION_DICT.get(
protocol_string, protocol_string)
ssgp_hash = codecs.encode(key[4:], 'hex')
event_data.ssgp_hash = codecs.decode(ssgp_hash, 'utf-8')
event_data.text_description = self._ParseBinaryDataAsString(
parser_mediator, record['desc'])
event_data.type_protocol = self._ParseBinaryDataAsString(
parser_mediator, record['atyp'])
event_data.where = self._ParseBinaryDataAsString(
parser_mediator, record['srvr'])
date_time = self._ParseDateTimeValue(parser_mediator, record['cdat'])
if date_time:
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_CREATION)
parser_mediator.ProduceEventWithEventData(event, event_data)
date_time = self._ParseDateTimeValue(parser_mediator, record['mdat'])
if date_time:
event = time_events.DateTimeValuesEvent(
date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
parser_mediator.ProduceEventWithEventData(event, event_data)
@classmethod
def GetFormatSpecification(cls):
"""Retrieves the format specification.
Returns:
FormatSpecification: format specification.
"""
format_specification = specification.FormatSpecification(cls.NAME)
format_specification.AddNewSignature(b'kych', offset=0)
return format_specification
def ParseFileObject(self, parser_mediator, file_object):
"""Parses a MacOS keychain file-like object.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_object (dfvfs.FileIO): a file-like object.
Raises:
UnableToParseFile: when the file cannot be parsed.
"""
try:
file_header = self._ReadFileHeader(file_object)
except (ValueError, errors.ParseError):
raise errors.UnableToParseFile('Unable to parse file header.')
tables = self._ReadTablesArray(file_object, file_header.tables_array_offset)
table = tables.get(self._RECORD_TYPE_APPLICATION_PASSWORD, None)
if table:
for record in table.records:
self._ParseApplicationPasswordRecord(parser_mediator, record)
table = tables.get(self._RECORD_TYPE_INTERNET_PASSWORD, None)
if table:
for record in table.records:
self._ParseInternetPasswordRecord(parser_mediator, record)
manager.ParsersManager.RegisterParser(KeychainParser)
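# Editorial sketch (not part of the plaso parser): keychain date/time attributes are
# stored as 15-character "YYYYMMDDhhmmssZ" strings, so a value such as
# '20200101123000Z' is turned into the time elements (2020, 1, 1, 12, 30, 0) by
# _ParseDateTimeValue above, and 'ssgp'-prefixed record keys become the hexadecimal
# ssgp_hash values via codecs.encode(key[4:], 'hex').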
| 35.628959
| 80
| 0.721234
|
a7c72e2763e3bf5a90650d063b50352d8a7f785c
| 4,188
|
py
|
Python
|
model_zoo/official/cv/googlenet/train.py
|
Gavin-Hoang/mindspore
|
f745ae0799a0840ebba18021c250f0089325a414
|
[
"Apache-2.0"
] | 2
|
2020-08-12T16:14:40.000Z
|
2020-12-04T03:05:57.000Z
|
model_zoo/official/cv/googlenet/train.py
|
Gavin-Hoang/mindspore
|
f745ae0799a0840ebba18021c250f0089325a414
|
[
"Apache-2.0"
] | null | null | null |
model_zoo/official/cv/googlenet/train.py
|
Gavin-Hoang/mindspore
|
f745ae0799a0840ebba18021c250f0089325a414
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
#################train googlenet example on cifar10########################
python train.py
"""
import argparse
import os
import random
import numpy as np
import mindspore.nn as nn
from mindspore import Tensor
from mindspore import context
from mindspore.communication.management import init
from mindspore.nn.optim.momentum import Momentum
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor, TimeMonitor
from mindspore.train.model import Model, ParallelMode
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from src.config import cifar_cfg as cfg
from src.dataset import create_dataset
from src.googlenet import GoogleNet
random.seed(1)
np.random.seed(1)
def lr_steps(global_step, lr_max=None, total_epochs=None, steps_per_epoch=None):
"""Set learning rate."""
lr_each_step = []
total_steps = steps_per_epoch * total_epochs
decay_epoch_index = [0.3 * total_steps, 0.6 * total_steps, 0.8 * total_steps]
for i in range(total_steps):
if i < decay_epoch_index[0]:
lr_each_step.append(lr_max)
elif i < decay_epoch_index[1]:
lr_each_step.append(lr_max * 0.1)
elif i < decay_epoch_index[2]:
lr_each_step.append(lr_max * 0.01)
else:
lr_each_step.append(lr_max * 0.001)
current_step = global_step
lr_each_step = np.array(lr_each_step).astype(np.float32)
learning_rate = lr_each_step[current_step:]
return learning_rate
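# Editorial sanity check (an illustrative example, not part of the original training
# script): with the step decay above, the schedule starts at lr_max and ends at
# lr_max * 0.001 once 80% of the total steps have passed.
_example_lr = lr_steps(0, lr_max=0.1, total_epochs=10, steps_per_epoch=100)
assert abs(_example_lr[0] - 0.1) < 1e-6
assert abs(_example_lr[-1] - 0.1 * 0.001) < 1e-6
del _example_lr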
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Cifar10 classification')
parser.add_argument('--device_id', type=int, default=None, help='device id of GPU or Ascend. (Default: None)')
args_opt = parser.parse_args()
context.set_context(mode=context.GRAPH_MODE, device_target=cfg.device_target)
if args_opt.device_id is not None:
context.set_context(device_id=args_opt.device_id)
else:
context.set_context(device_id=cfg.device_id)
device_num = int(os.environ.get("DEVICE_NUM", 1))
if device_num > 1:
context.reset_auto_parallel_context()
context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL,
mirror_mean=True)
init()
dataset = create_dataset(cfg.data_path, 1)
batch_num = dataset.get_dataset_size()
net = GoogleNet(num_classes=cfg.num_classes)
# Continue training if set pre_trained to be True
if cfg.pre_trained:
param_dict = load_checkpoint(cfg.checkpoint_path)
load_param_into_net(net, param_dict)
lr = lr_steps(0, lr_max=cfg.lr_init, total_epochs=cfg.epoch_size, steps_per_epoch=batch_num)
opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), Tensor(lr), cfg.momentum,
weight_decay=cfg.weight_decay)
loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean', is_grad=False)
model = Model(net, loss_fn=loss, optimizer=opt, metrics={'acc'},
amp_level="O2", keep_batchnorm_fp32=False, loss_scale_manager=None)
config_ck = CheckpointConfig(save_checkpoint_steps=batch_num * 5, keep_checkpoint_max=cfg.keep_checkpoint_max)
time_cb = TimeMonitor(data_size=batch_num)
ckpoint_cb = ModelCheckpoint(prefix="train_googlenet_cifar10", directory="./", config=config_ck)
loss_cb = LossMonitor()
model.train(cfg.epoch_size, dataset, callbacks=[time_cb, ckpoint_cb, loss_cb])
print("train success")
| 41.058824
| 114
| 0.713706
|
0a44a6b97d4ee6fcc7098a419071cd15a2d73f28
| 3,142
|
py
|
Python
|
aries_cloudagent/config/tests/test_injection_context.py
|
ldej/aries-cloudagent-python
|
25b7a9c08921e67b0962c434102489884ac403b2
|
[
"Apache-2.0"
] | 1
|
2021-01-15T01:04:43.000Z
|
2021-01-15T01:04:43.000Z
|
aries_cloudagent/config/tests/test_injection_context.py
|
ldej/aries-cloudagent-python
|
25b7a9c08921e67b0962c434102489884ac403b2
|
[
"Apache-2.0"
] | 1
|
2020-06-16T20:20:55.000Z
|
2020-06-16T20:20:55.000Z
|
aries_cloudagent/config/tests/test_injection_context.py
|
ldej/aries-cloudagent-python
|
25b7a9c08921e67b0962c434102489884ac403b2
|
[
"Apache-2.0"
] | 1
|
2020-04-30T08:22:22.000Z
|
2020-04-30T08:22:22.000Z
|
from asynctest import TestCase as AsyncTestCase
from ..base import BaseInjector, InjectorError
from ..injection_context import InjectionContext, InjectionContextError
class TestInjectionContext(AsyncTestCase):
def setUp(self):
self.test_key = "TEST"
self.test_value = "VALUE"
self.test_scope = "SCOPE"
self.test_settings = {self.test_key: self.test_value}
self.test_instance = InjectionContext(settings=self.test_settings)
def test_settings_init(self):
"""Test settings initialization."""
assert self.test_instance.scope_name == self.test_instance.ROOT_SCOPE
for key in self.test_settings:
assert key in self.test_instance.settings
assert self.test_instance.settings[key] == self.test_settings[key]
def test_simple_scope(self):
"""Test scope entrance and exit."""
with self.assertRaises(InjectionContextError):
self.test_instance.start_scope(None)
with self.assertRaises(InjectionContextError):
self.test_instance.start_scope(self.test_instance.ROOT_SCOPE)
injector = self.test_instance.injector_for_scope(self.test_instance.ROOT_SCOPE)
assert injector == self.test_instance.injector
assert self.test_instance.injector_for_scope("no such scope") is None
context = self.test_instance.start_scope(self.test_scope)
assert context.scope_name == self.test_scope
with self.assertRaises(InjectionContextError):
context.start_scope(self.test_instance.ROOT_SCOPE)
assert self.test_instance.scope_name == self.test_instance.ROOT_SCOPE
def test_settings_scope(self):
"""Test scoped settings."""
upd_settings = {self.test_key: "NEWVAL"}
context = self.test_instance.start_scope(self.test_scope, upd_settings)
assert context.settings[self.test_key] == "NEWVAL"
assert self.test_instance.settings[self.test_key] == self.test_value
root = context.injector_for_scope(context.ROOT_SCOPE)
assert root.settings[self.test_key] == self.test_value
async def test_inject_simple(self):
"""Test a basic injection."""
assert (await self.test_instance.inject(str, required=False)) is None
with self.assertRaises(InjectorError):
await self.test_instance.inject(str)
self.test_instance.injector.bind_instance(str, self.test_value)
assert (await self.test_instance.inject(str)) is self.test_value
self.test_instance.injector = None
assert self.test_instance.injector is None
async def test_inject_scope(self):
"""Test a scoped injection."""
context = self.test_instance.start_scope(self.test_scope)
assert (await context.inject(str, required=False)) is None
context.injector.bind_instance(str, self.test_value)
assert (await context.inject(str)) is self.test_value
assert (await self.test_instance.inject(str, required=False)) is None
root = context.injector_for_scope(context.ROOT_SCOPE)
assert (await root.inject(str, required=False)) is None
| 46.205882
| 87
| 0.710694
|
182aa284decccae02a6fb64c7bacd9a5b5f4488e
| 11,396
|
py
|
Python
|
app/viz.py
|
tigju/PT17_cityspire-b-ds
|
488ef2c7e11ff37574e88f5d41434509020c7439
|
[
"MIT"
] | 1
|
2021-04-09T18:44:01.000Z
|
2021-04-09T18:44:01.000Z
|
app/viz.py
|
tigju/PT17_cityspire-b-ds
|
488ef2c7e11ff37574e88f5d41434509020c7439
|
[
"MIT"
] | 3
|
2021-04-07T03:48:08.000Z
|
2021-04-19T14:21:36.000Z
|
app/viz.py
|
tigju/PT17_cityspire-b-ds
|
488ef2c7e11ff37574e88f5d41434509020c7439
|
[
"MIT"
] | 5
|
2021-03-11T04:20:27.000Z
|
2021-05-01T20:03:05.000Z
|
"""Data visualization functions"""
from fastapi import APIRouter, HTTPException, Depends
from pydantic import BaseModel
import pandas as pd
import plotly.express as px
from plotly.subplots import make_subplots
import plotly.graph_objects as go
from app.ml import City, validate_city
from app.state_abbr import us_state_abbrev as abbr
from app.db import select_weather_conditions, select_weather_historical, select_weather_daily
router = APIRouter()
MODEL_CSV = 'https://media.githubusercontent.com/media/CityScape-Datasets/Workspace_Datasets/main/Models/nn_model/nn_model.csv'
class CityData():
"""
Locates specific city data
- Demographics
    - Employment -> industry, employment
- Crime -> violent crime, property crime
- Air Quality Index
"""
def __init__(self, current_city):
self.current_city = current_city
self.dataframe = pd.read_csv(MODEL_CSV)
self.subset = self.dataframe[self.dataframe['City'] == self.current_city.city]
def demographics(self):
self.demographics = ['Hispanic', 'White', 'Black', 'Native', 'Asian', 'Pacific']
return self.demographics
def industry(self):
self.industry = ['PrivateWork', 'PublicWork', 'SelfEmployed', 'FamilyWork']
return self.industry
def employment(self):
self.employment= ['Professional', 'Service', 'Office', 'Construction', 'Production']
return self.employment
def crime(self):
self.crime = ['Violent crime', 'Property crime', 'Arson']
return self.crime
def violent_crime(self):
self.violent_crime= ['Murder and nonnegligent manslaughter','Rape', 'Robbery', 'Aggravated assault']
return self.violent_crime
def property_crime(self):
self.property_crime = ['Burglary','Larceny- theft', 'Motor vehicle theft']
return self.property_crime
def air_quality_index(self):
self.air_quality_index = ['Days with AQI', 'Good Days', 'Moderate Days','Unhealthy for Sensitive Groups Days', 'Unhealthy Days','Very Unhealthy Days', 'Hazardous Days', 'Max AQI', '90th Percentile AQI', 'Median AQI', 'Days CO', 'Days NO2', 'Days Ozone', 'Days SO2', 'Days PM2.5', 'Days PM10']
return self.air_quality_index
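# Editorial note (not part of the original router code): CityData only groups the
# model CSV's columns; each endpoint below subsets and melts one group, roughly
#   city_data = CityData(validate_city(current_city))
#   melted = pd.melt(city_data.subset[city_data.demographics()])
# before handing the long-format frame to plotly.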
@router.post("/api/demographics_graph")
async def demographics_plot(current_city:City):
"""
Visualize demographic information for city
### Query Parameters
- city
### Response
JSON string to render with react-plotly.js
"""
city = validate_city(current_city)
city_data = CityData(city)
    # Demographics
city_demographics = city_data.subset[city_data.demographics()]
city_demographics['Not Specified'] = 100 - city_demographics.sum(axis=1) # Accounting for people that did not respond
melt = pd.melt(city_demographics)
melt.columns = ['demographic', 'percentage']
fig = px.pie(melt, values ='percentage', names ='demographic')
fig.update_layout(
title={
'text': f'Demographics in {city}',
'y':0.98,
'x':0.5,
'xanchor': 'center',
'yanchor': 'top'})
fig.show()
# fig.write_html("path/to/file.html")
return fig.to_json()
@router.post("/api/employment_graph")
async def employment_plot(current_city:City):
"""
Visualize employment information for city
- see industry breakdown and employment type
### Query Parameters
- city
### Response
JSON string to render with react-plotly.js
"""
city = validate_city(current_city)
city_data = CityData(city)
# Industry
industry_type = city_data.subset[city_data.industry()]
industry_melt = pd.melt(industry_type)
industry_melt.columns = ['industry', 'percentage']
# Employment Type
employment_type = city_data.subset[city_data.employment()]
type_melt = pd.melt(employment_type)
type_melt.columns = ['employment type', 'percentage']
#Create subplots
fig = make_subplots(rows=1, cols=2, subplot_titles = (f'Industry in {city}', f'Employment Types in {city}'))
fig.add_trace(go.Bar(x = industry_melt['industry'], y = industry_melt['percentage'],
marker = dict(color = industry_melt['percentage'], coloraxis = "coloraxis")),
row = 1, col = 1)
fig.add_trace(go.Bar(x =type_melt['employment type'], y =type_melt['percentage'],
marker = dict(color = type_melt['percentage'], coloraxis = "coloraxis")),
row = 1, col = 2)
fig.update_layout(
coloraxis=dict(colorscale = 'Bluered_r'),
coloraxis_showscale = False,
showlegend = False)
fig.show()
# fig.write_html("path/to/file.html")
return fig.to_json()
@router.post("/api/crime_graph")
async def crime_plot(current_city:City):
"""
Visualize crime information for city
- see overall crime breakdown
- visualize breakdown of violent crime and property crime
### Query Parameters
- city
### Response
JSON string to render with react-plotly.js
"""
city = validate_city(current_city)
city_data = CityData(city)
# Crime Categories
crime_type = city_data.subset[city_data.crime()]
crime_melt = pd.melt(crime_type)
crime_melt.columns = ['categories', 'total']
# Violent Crime
violent_crime_type = city_data.subset[city_data.violent_crime()]
violent_crime_type_melt = pd.melt(violent_crime_type)
violent_crime_type_melt.columns = ['violent crime type', 'total']
# Property Crime
property_crime_type = city_data.subset[city_data.property_crime()]
property_crime_melt = pd.melt(property_crime_type)
property_crime_melt.columns = ['property crime type', 'total']
#Create subplots
fig = make_subplots(
rows=2, cols=2,
subplot_titles = (f"Crime Breakdown in {city}", f"Violent Crime Breakdown in {city}", f"Property Crime Breakdown in {city}"),
specs = [[{"type":"xy", 'rowspan':2}, {"type": "pie"}],
[None, {"type": "pie"}]],
)
fig.add_trace(go.Bar(name = 'Crime Types', x = crime_melt['categories'], y = crime_melt['total']),
row = 1, col = 1)
fig.add_trace(go.Pie(values = violent_crime_type_melt['total'],
labels = violent_crime_type_melt['violent crime type']),
row = 1, col = 2)
fig.add_trace(go.Pie(values = property_crime_melt['total'],
labels = property_crime_melt['property crime type']),
row = 2, col = 2)
fig.show()
# fig.write_html("path/to/file.html")
return fig.to_json()
@router.post("/api/aqi_graph")
async def air_quality_plot(current_city:City):
"""
Visualize air quality information for city
### Query Parameters
- city
### Response
JSON string to render with react-plotly.js
"""
city = validate_city(current_city)
city_data = CityData(city)
# Air Quality
air_quality_details = city_data.subset[city_data.air_quality_index()]
air_quality_melt = pd.melt(air_quality_details)
air_quality_melt.columns = ['air quality indicators', 'days']
fig = make_subplots(rows = 1, cols = 1)
fig.add_trace(go.Bar(x = air_quality_melt['days'], y = air_quality_melt['air quality indicators'],
marker = dict(color = air_quality_melt['days'], coloraxis = "coloraxis"), orientation = 'h'))
fig.update_layout(
coloraxis=dict(colorscale = 'Viridis'),
coloraxis_showscale = False,
xaxis_range = [0, 360],
title={
'text': f'Air Quality in {city}',
'y':0.95,
'x':0.5,
'xanchor': 'center',
'yanchor': 'top'})
fig.show()
# fig.write_html("path/to/file.html")
return fig.to_json()
@router.post("/api/weather_forecast_graph")
async def weather_forecast_plot(current_city:City):
"""
Visualize weather temperature forecast for city
### Query Parameters
- city
### Response
JSON string to render with react-plotly.js
"""
city = validate_city(current_city)
historical = await select_weather_historical(city)
forecast = await select_weather_daily(city)
# get data
hist = pd.DataFrame(historical).sort_values(by=['date'])
forec = pd.DataFrame(forecast).sort_values(by=['date'])
layout = go.Layout(
legend=dict(x=0, y=1.2, traceorder='normal', font=dict(size=12,))
)
fig = go.Figure(layout=layout)
fig.add_trace(go.Scatter(x=hist['date'], y=hist['avg_temp_fahrenheit'],
line=dict(width=1),
mode='lines+markers',
name='Historical temperature'))
fig.add_trace(go.Scatter(x=forec['date'], y=forec['average_temperature'],
line=dict(color='#FF8F34', width=3, dash='dash'),
name='Predicted avg temperature'))
fig.add_trace(go.Scatter(x=forec['date'], y=forec['min_temperature'],
line = dict(color='rgb(150,150,150)', width=2, dash='dot'),
name='Predicted min temperature'))
fig.add_trace(go.Scatter(x=forec['date'], y=forec['max_temperature'],
line = dict(color='rgb(150,150,150)', width=2, dash='dot'),
name='Predicted max temperature'))
fig.update_layout(
autosize=False, width=980, height=600,
margin=dict(l=10, r=10, b=10, t=100, pad=4),
yaxis=dict(title_text="Temperature deg Fahrenheit"),
xaxis=dict(title_text="Date"),
font=dict(family="Courier New, monospace", size=15),
title={
'text': "Historical and Forecast temperature {}, {}".format(city.city, city.state),
'y': 0.9,
'x': 0.65,
'xanchor': 'center',
'yanchor': 'top'
}
)
fig.show()
return fig.to_json()
@router.post("/api/weather_conditions_graph")
async def weather_conditions_plot(current_city:City):
"""
    Visualize weather conditions for city
### Query Parameters
- city
### Response
JSON string to render with react-plotly.js
"""
city = validate_city(current_city)
weather_conditions = await select_weather_conditions(city)
weather_dict = dict(weather_conditions)
df_conditions = pd.Series(weather_dict)
fig = go.Figure([go.Bar(x=['sunny', 'cloudy', 'rainy', 'snowy'],
y=df_conditions[['sunny_days_avg_year', 'cloudy_days_avg_year', 'rainy_days_avg_year', 'snowy_days_avg_year']],
hovertext=['Sunny Days per year', 'Cloudy Days per year', 'Rainy Days per year', 'Snowy Days per year'])])
fig.update_layout(
autosize=False,
width=980,
height=600,
margin=dict(l=10, r=10, b=10, t=100, pad=4),
yaxis=dict(title_text="Number of Days Per Year"),
xaxis=dict(title_text="Weather Conditions"),
font=dict(family="Courier New, monospace", size=15),
title={
'text': "Annual Weather Conditions for {}, {}".format(city.city, city.state),
'y': 0.9,
'x': 0.5,
'xanchor': 'center',
'yanchor': 'top'
}
)
fig.update_traces(marker_color='rgb(177, 255, 8)', marker_line_color='rgb(97, 140, 3)',
marker_line_width=1.5, opacity=0.6)
fig.show()
return fig.to_json()
| 36.880259
| 300
| 0.635574
|
43852d78b0355ffe3117e5f898779fd88f4f8df0
| 1,105
|
py
|
Python
|
register/views.py
|
crackcraft/pyreg
|
719e673cd694cd2dd76d2a1016c1177c752c098f
|
[
"CC0-1.0"
] | null | null | null |
register/views.py
|
crackcraft/pyreg
|
719e673cd694cd2dd76d2a1016c1177c752c098f
|
[
"CC0-1.0"
] | null | null | null |
register/views.py
|
crackcraft/pyreg
|
719e673cd694cd2dd76d2a1016c1177c752c098f
|
[
"CC0-1.0"
] | null | null | null |
import os
from django.shortcuts import render
from django.conf import settings
from django.http import HttpResponse
from . import database
from .models import PageView, Record
# Create your views here.
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[0]
else:
ip = request.META.get('REMOTE_ADDR')
return ip
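# Editorial note (not part of the original view code): behind a proxy,
# HTTP_X_FORWARDED_FOR typically holds a comma-separated chain such as
# "203.0.113.7, 10.0.0.2"; the helper above returns the left-most entry,
# i.e. the original client address "203.0.113.7".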
def index(request):
hostname = os.getenv('HOSTNAME', 'unknown')
PageView.objects.create(hostname=hostname)
return render(request, './index.html', {
# 'hostname': hostname,
# 'database': database.info(),
# 'count': PageView.objects.count()
})
def register(request):
if request.POST['email'] == 'af8dc2fc-c7db-4295-9728-2a26673c8705':
return render(request, './report.html', {'data':Record.objects.all()})
else:
Record.objects.create(ip=get_client_ip(request), email=request.POST['email'])
return render(request, './done.html')
def health(request):
return HttpResponse(PageView.objects.count())
| 26.95122
| 85
| 0.681448
|
5a6099150b79dfea01744e3ca91c206621319f79
| 1,848
|
py
|
Python
|
plot_uhlmann_convergence.py
|
mhauru/uhlmann-fidelities-from-tensor-networks
|
cc30882db529fcc787418b172550ae91bd34dcba
|
[
"MIT"
] | 3
|
2018-10-07T03:16:12.000Z
|
2020-11-19T01:22:36.000Z
|
plot_uhlmann_convergence.py
|
mhauru/uhlmann-fidelities-from-tensor-networks
|
cc30882db529fcc787418b172550ae91bd34dcba
|
[
"MIT"
] | null | null | null |
plot_uhlmann_convergence.py
|
mhauru/uhlmann-fidelities-from-tensor-networks
|
cc30882db529fcc787418b172550ae91bd34dcba
|
[
"MIT"
] | null | null | null |
import numpy as np
import scipy.integrate as integrate
import pickle
import warnings
import datetime
import sys
import matplotlib.cm as cm
import matplotlib.colors as colors
import matplotlib.gridspec as gridspec
from matplotlib import pyplot as plt
from ncon import ncon
from tntools import datadispenser
noshow = "noshow" in sys.argv
practice = "practice" in sys.argv
datadir = "uhlmann_compare_data"
plot_file = "uhlmann_convergence.pdf"
L = 300
L_plot = 300
def fid_func(fids):
fids = 1-fids
fids = fids[:L_plot]
return fids
fig = plt.figure(figsize=(8,4))
ax = fig.add_subplot(1,1,1)
with open("./{}/fids_exact_latest_21_11_1.0_1.0_{}.p".format(datadir, L), "rb") as f:
fids = pickle.load(f)
fids = np.array(fids)
fids = fid_func(fids)
ax.semilogy(fids, ls=":",
label="$F, \; h=1.0$",
color="blue")
with open("./{}/fids_sep_latest_21_11_1.0_1.0_{}.p".format(datadir, L), "rb") as f:
fids = pickle.load(f)
fids = np.array(fids)
fids = fid_func(fids)
ax.semilogy(fids, ls="-",
label="$F_{\mathrm{d}}, \; h=1.0$",
color="blue")
with open("./{}/fids_exact_latest_21_11_1.05_1.05_{}.p".format(datadir, L), "rb") as f:
fids = pickle.load(f)
fids = np.array(fids)
fids = fid_func(fids)
ax.semilogy(fids, ls=":",
label="$F, \; h=1.05$",
color="green")
with open("./{}/fids_sep_latest_21_11_1.05_1.05_{}.p".format(datadir, L), "rb") as f:
fids = pickle.load(f)
fids = np.array(fids)
fids = fid_func(fids)
ax.semilogy(fids, ls="-",
label="$F_{\mathrm{d}}, \; h=1.05$",
color="green")
ax.set_ylim(1e-10, 1e-1)
ax.set_xlabel("Window size")
ax.set_ylabel("$1 - $ fidelity")
ax.legend()
plt.savefig(plot_file)
if not noshow:
plt.show()
| 25.315068
| 87
| 0.617965
|
2299e0e3bca86887febc08be7ec43d085813376a
| 3,677
|
py
|
Python
|
Code/FlashWriter/client.py
|
ctag-fh-kiel/troll-8
|
18b872b5b0290dbb0e9f514edea392601a896346
|
[
"CC-BY-4.0"
] | 11
|
2017-11-01T14:47:33.000Z
|
2022-01-31T09:04:44.000Z
|
Code/FlashWriter/client.py
|
ctag-fh-kiel/troll-8
|
18b872b5b0290dbb0e9f514edea392601a896346
|
[
"CC-BY-4.0"
] | null | null | null |
Code/FlashWriter/client.py
|
ctag-fh-kiel/troll-8
|
18b872b5b0290dbb0e9f514edea392601a896346
|
[
"CC-BY-4.0"
] | 3
|
2017-11-20T17:22:12.000Z
|
2021-11-08T23:23:13.000Z
|
# Flash client with automatic resampling for Troll8
# Duration of the input file should be an integer number of seconds
#
# Dependencies:
# - pyserial (pip install pyserial)
# - Python Audio Tools (http://audiotools.sourceforge.net/install.html)
import struct
import sys
import getopt
import wave
import audioop
import array
import serial
def ByteToHex( byteStr ):
"""
Convert a byte string to its hex string representation, e.g. for output.
"""
return ''.join( [ "%02X " % ord( x ) for x in byteStr ] ).strip()
def HexToByte( hexStr ):
"""
Convert a string of hex byte values into a byte string. The hex byte values may
or may not be space separated.
"""
bytes = []
hexStr = ''.join( hexStr.split(" ") )
for i in range(0, len(hexStr), 2):
bytes.append( chr( int (hexStr[i:i+2], 16 ) ) )
return ''.join( bytes )
def main(argv):
# Resample wavfile
TARGET_SAMPLERATE = 16384
inputfile = ''
comport = ''
address = 0
erase = 0
try:
opts, args = getopt.getopt(argv, "hi:p:a:e:", ["ifile=", "port=", "address=", "erase"])
except getopt.GetoptError:
print 'client.py -i <inputfile> -p <comport> -a address'
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print 'client.py -i <inputfile> -p <comport>'
sys.exit()
elif opt in ("-i", "--ifile"):
inputfile = arg
elif opt in ("-p", "--port"):
comport = arg
elif opt in ("-a", "--address"):
address = int(arg)
elif opt in ("-e", "--erase"):
erase = 1
print 'Loading', inputfile, '...'
infile = wave.open(inputfile, 'rb')
length = infile.getnframes()
duration = length / float(infile.getframerate())
print 'Duration: ', duration, 's'
print 'Samplingrate: ', infile.getframerate()
audiodata = infile.readframes(length)
infile.close()
# audioop.ratecv returns a (fragment, state) tuple; keep only the resampled fragment
if infile.getnchannels() == 2:
    audiodata_n = audioop.ratecv(audiodata, 2, 2, infile.getframerate(), TARGET_SAMPLERATE, None)[0]
    audiodata_n = audioop.tomono(audiodata_n, 2, 0.5, 0.5)
else:
    audiodata_n = audioop.ratecv(audiodata, 2, 1, infile.getframerate(), TARGET_SAMPLERATE, None)[0]
audiodata_n = audioop.lin2lin(audiodata_n, 2, 1)
audiodata_n = audioop.bias(audiodata_n, 1, 128)
byte_data = bytearray(audiodata_n)
outfile = wave.open('Outfile.wav', 'wb')
outfile.setparams((1, 1, TARGET_SAMPLERATE, 0, 'NONE', 'not compressed'))
outfile.writeframes(audiodata_n)
outfile.close()
# Convert datasize (int) to 4 bytes
data_size = bytearray(4)
data_size[0] = (len(byte_data)>>24) & 0xff
data_size[1] = (len(byte_data)>>16) & 0xff
data_size[2] = (len(byte_data)>>8) & 0xff
data_size[3] = len(byte_data) & 0xff
# Convert address (int) to 4 bytes
address_bytes = bytearray(4)
address_bytes[0] = (address>>24) & 0xff
address_bytes[1] = (address>>16) & 0xff
address_bytes[2] = (address>>8) & 0xff
address_bytes[3] = address & 0xff
# Transfer wavfile
serialport = serial.Serial(comport, 9600, timeout=1)
if erase:
serialport.write(bytearray('\x02')) # Erase Flash
# success = serialport.read()
# if !success:
# print 'Failed to write to flash (try --erase)'
serialport.write(bytearray('\x01')) # Write request
serialport.write(data_size) # Data size
serialport.write(address_bytes) # Address
print 'Transferring audio file...'
serialport.write(byte_data)
print 'Success!'
serialport.close()
if __name__ == "__main__":
main(sys.argv[1:])
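# Example invocation (file name and serial port are hypothetical):
#   python client.py -i loop.wav -p /dev/ttyUSB0 -a 0 --erase
# which resamples loop.wav to 16384 Hz 8-bit unsigned mono and writes it to flash
# starting at address 0, erasing the chip first.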
| 30.139344
| 101
| 0.616263
|
c0c2c3dedeab23919a5e9e8ea605c1bc6c71020e
| 515
|
py
|
Python
|
tests/test_set_where_x_greater_than_y.py
|
tlambert-forks/pyclesperanto_prototype
|
aea964a75e691f19b7753040daa8b276d57ccf36
|
[
"BSD-3-Clause"
] | 64
|
2020-03-18T12:11:22.000Z
|
2022-03-31T08:19:18.000Z
|
tests/test_set_where_x_greater_than_y.py
|
haesleinhuepf/pyclesperanto_prototype
|
65bc3035d3b2b61a2722c93b95bae310bfbd190e
|
[
"BSD-3-Clause"
] | 148
|
2020-05-14T06:14:11.000Z
|
2022-03-26T15:02:31.000Z
|
tests/test_set_where_x_greater_than_y.py
|
haesleinhuepf/pyclesperanto_prototype
|
65bc3035d3b2b61a2722c93b95bae310bfbd190e
|
[
"BSD-3-Clause"
] | 16
|
2020-05-31T00:53:44.000Z
|
2022-03-23T13:20:57.000Z
|
import pyclesperanto_prototype as cle
import numpy as np
def test_set_where_x_greater_than_y():
result = cle.push(np.asarray([
[0, 0, 0, 1],
[0, 0, 3, 1],
[0, 0, 3, 1],
[1, 1, 1, 1]
]).T)
reference = cle.push(np.asarray([
[0, 0, 0, 1],
[3, 0, 3, 1],
[3, 3, 3, 1],
[3, 3, 3, 1]
]).T)
cle.set_where_x_greater_than_y(result, 3)
a = cle.pull(result)
b = cle.pull(reference)
print(a)
assert (np.array_equal(a, b))
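# Allow the file to be executed directly as well as collected by pytest
# (a small convenience, not required by the test suite):
if __name__ == "__main__":
    test_set_where_x_greater_than_y()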
| 19.074074
| 45
| 0.500971
|
bcc51590aad25540dcebd720e0c46dfaca29c1ab
| 1,651
|
py
|
Python
|
setup.py
|
PandaWhoCodes/torstream
|
a619b382677cf70e7eae3bff8800741f84da475b
|
[
"MIT"
] | 3
|
2018-04-03T04:13:22.000Z
|
2021-11-10T09:03:29.000Z
|
setup.py
|
PandaWhoCodes/torstream
|
a619b382677cf70e7eae3bff8800741f84da475b
|
[
"MIT"
] | null | null | null |
setup.py
|
PandaWhoCodes/torstream
|
a619b382677cf70e7eae3bff8800741f84da475b
|
[
"MIT"
] | null | null | null |
"""
Stream any media content using console
"""
from setuptools import setup
dependencies = ['bs4', 'lxml', 'requests', 'argparse']
setup(
name='torstream',
version='0.1.6',
url='https://github.com/PandaWhoCodes/torstream',
license='MIT',
author='Thomas Ashish Cherian',
author_email='ufoundashish@gmail.com',
description='Stream any media content using console',
long_description=__doc__,
packages=['torstream'],
include_package_data=True,
zip_safe=False,
platforms='any',
scripts=['torstream/streamer.py'],
install_requires=dependencies,
entry_points={
'console_scripts': [
'torstream = torstream.streamer:argParser',
],
},
classifiers=[
# As from http://pypi.python.org/pypi?%3Aaction=list_classifiers
# 'Development Status :: 1 - Planning',
# 'Development Status :: 2 - Pre-Alpha',
# 'Development Status :: 3 - Alpha',
'Development Status :: 4 - Beta',
# 'Development Status :: 5 - Production/Stable',
# 'Development Status :: 6 - Mature',
# 'Development Status :: 7 - Inactive',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Unix',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
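# Typical usage once installed (a sketch; see the repository README for details):
#   pip install torstream     # or `pip install .` from a source checkout
#   torstream ...             # console entry point declared in entry_points above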
| 32.372549
| 72
| 0.608116
|
6c38b0173dc5e7d77f515b7907cd4178ccaab7dd
| 3,665
|
py
|
Python
|
code/mrubis_controller/component_utility_predictor.py
|
hpi-classroom/Adversarial-Digital-Twins
|
f997466834df26adce29bea97115450fe4dcf4f7
|
[
"MIT"
] | null | null | null |
code/mrubis_controller/component_utility_predictor.py
|
hpi-classroom/Adversarial-Digital-Twins
|
f997466834df26adce29bea97115450fe4dcf4f7
|
[
"MIT"
] | 1
|
2021-07-14T12:50:20.000Z
|
2021-07-14T12:50:20.000Z
|
code/mrubis_controller/component_utility_predictor.py
|
hpi-classroom/Adversarial-Digital-Twins
|
f997466834df26adce29bea97115450fe4dcf4f7
|
[
"MIT"
] | 2
|
2021-05-06T15:30:48.000Z
|
2021-12-08T11:06:24.000Z
|
from os import stat
import pandas as pd
import numpy as np
from pathlib import Path
from sklearn.linear_model import Ridge
class RidgeUtilityPredictor():
def __init__(self) -> None:
self.train_cols = {
'features': [
'CRITICALITY',
'CONNECTIVITY',
'RELIABILITY',
'IMPORTANCE',
'PROVIDED_INTERFACE',
'REQUIRED_INTERFACE',
'ADT',
' PMax',
'alpha',
'In Use REPLICA',
'LOAD',
],
'y': 'OPTIMAL_UTILITY'
}
self.mrubis_features_to_train_map = {
'criticality': 'CRITICALITY',
'connectivity': 'CONNECTIVITY',
'reliability': 'RELIABILITY',
'importance': 'IMPORTANCE',
'provided_interface': 'PROVIDED_INTERFACE',
'required_interface': 'REQUIRED_INTERFACE',
'adt': 'ADT',
'perf_max': ' PMax',
'sat_point': 'alpha',
'replica': 'In Use REPLICA',
'request': 'LOAD',
}
def load_train_data(self, path=Path('../data/TrainingmRUBiS_Theta0.05_NonStationary.csv')):
data = pd.read_csv(
path,
usecols=self.train_cols['features'] + [self.train_cols['y']]
).dropna()
self.data = self.calculate_interaction_terms(data)
self.train_X = self.data[self.train_cols['features'] + ['beta', 'Linear', 'Saturating', 'Discontinuous', 'Combined']].drop(['IMPORTANCE', 'CONNECTIVITY', 'REQUIRED_INTERFACE'], axis=1)
self.train_y = self.data[self.train_cols['y']]
@staticmethod
def calculate_interaction_terms(data):
data['beta'] = data.apply(lambda row: row['PROVIDED_INTERFACE'] + (0 if row['PROVIDED_INTERFACE'] >= 2 else 1), axis=1)
data['Linear'] = data['RELIABILITY'] * data['CRITICALITY'] * (data['PROVIDED_INTERFACE'] + data['REQUIRED_INTERFACE'])
data['Saturating'] = data['RELIABILITY'] * \
data['CRITICALITY'] * \
data[' PMax'] * \
np.tanh(data['alpha'] * \
data['In Use REPLICA'] / data['LOAD']) * \
(data['PROVIDED_INTERFACE'] + data['REQUIRED_INTERFACE'])
data['Discontinuous'] = data['RELIABILITY'] * data['CRITICALITY'] * (data['REQUIRED_INTERFACE'] + 1) * data['IMPORTANCE'] * data['beta'] * data['PROVIDED_INTERFACE'] - 10 * data['ADT']
data['Combined'] = data['RELIABILITY'] * data['CRITICALITY'] * data['IMPORTANCE'] * data['beta'] * data[' PMax'] * np.tanh(data['alpha'] * data['In Use REPLICA'] / data['LOAD']) * (data['REQUIRED_INTERFACE'] + 1) - 10 * data['ADT']
return data
def train_on_batch_file(self):
self.model = Ridge()
self.model.fit(self.train_X, self.train_y)
def train(self, X, y):
self.model.fit(X, y)
def predict_on_mrubis_output(self, component_features):
feature_cols = self.train_cols['features']
renamed_features = component_features.rename(columns=self.mrubis_features_to_train_map)[feature_cols].astype(float)
features_with_interaction_terms = self.calculate_interaction_terms(renamed_features)
required_cols_for_prediction = feature_cols + ['beta', 'Linear', 'Saturating', 'Discontinuous', 'Combined']
relevant_features = features_with_interaction_terms[required_cols_for_prediction].drop(columns=['IMPORTANCE', 'CONNECTIVITY', 'REQUIRED_INTERFACE'])
return self.model.predict(relevant_features)
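# A minimal usage sketch (the DataFrame of component features is an assumption and must
# provide the mRUBiS column names mapped in __init__):
#
#   predictor = RidgeUtilityPredictor()
#   predictor.load_train_data()          # reads the default training CSV
#   predictor.train_on_batch_file()
#   utilities = predictor.predict_on_mrubis_output(component_features_df)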
| 45.246914
| 240
| 0.584993
|
60a3e6408d6f407536334adf5626e3416bc28dc2
| 1,219
|
py
|
Python
|
examples/SimpleTiledTiffConverter.py
|
NHPatterson/bfio
|
0891721b316ccaef6d4bb434e1c80c93fa8fcd69
|
[
"MIT"
] | 6
|
2021-09-09T01:27:13.000Z
|
2021-12-16T13:52:15.000Z
|
examples/SimpleTiledTiffConverter.py
|
NHPatterson/bfio
|
0891721b316ccaef6d4bb434e1c80c93fa8fcd69
|
[
"MIT"
] | 15
|
2021-07-20T13:16:34.000Z
|
2022-03-04T13:34:16.000Z
|
examples/SimpleTiledTiffConverter.py
|
NHPatterson/bfio
|
0891721b316ccaef6d4bb434e1c80c93fa8fcd69
|
[
"MIT"
] | 2
|
2021-11-01T18:08:18.000Z
|
2022-01-26T19:23:12.000Z
|
from bfio import BioReader, BioWriter
from pathlib import Path
import requests
import numpy as np
""" Get an example image """
# Set up the directories
PATH = Path("data")
PATH.mkdir(parents=True, exist_ok=True)
# Download the data if it doesn't exist
URL = "https://github.com/usnistgov/WIPP/raw/master/data/PyramidBuilding/inputCollection/"
FILENAME = "img_r001_c001.ome.tif"
if not (PATH / FILENAME).exists():
content = requests.get(URL + FILENAME).content
(PATH / FILENAME).open("wb").write(content)
""" Convert the tif to tiled tiff """
# Set up the BioReader
with BioReader(PATH / FILENAME,backend='java') as br, \
BioWriter(PATH / 'out.ome.tif',metadata=br.metadata,backend='python') as bw:
# Print off some information about the image before loading it
print('br.shape: {}'.format(br.shape))
print('br.dtype: {}'.format(br.dtype))
# Read in the original image, then save
original_image = br[:]
bw[:] = original_image
# Compare the original and saved images using the Python backend
br = BioReader(PATH.joinpath('out.ome.tif'))
new_image = br.read()
br.close()
print('original and saved images are identical: {}'.format(np.array_equal(new_image,original_image)))
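# A short sketch (region size chosen arbitrarily) showing the point of the tiled layout:
# sub-regions can be read without loading the whole image.
#
#   with BioReader(PATH / 'out.ome.tif', backend='python') as br_tiled:
#       top_left = br_tiled[0:512, 0:512]
#       print('sub-region shape: {}'.format(top_left.shape))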
| 31.25641
| 101
| 0.7137
|
9722b326730ac93fc748319faa8820589fb2052d
| 44,928
|
py
|
Python
|
autotest/gcore/cog.py
|
vincentschut/gdal
|
91daf084c245b31a2d6d06fb4b12e2a3229effa1
|
[
"MIT"
] | 7
|
2019-07-18T17:00:22.000Z
|
2021-09-12T19:06:41.000Z
|
autotest/gcore/cog.py
|
vincentschut/gdal
|
91daf084c245b31a2d6d06fb4b12e2a3229effa1
|
[
"MIT"
] | 4
|
2019-07-18T15:49:23.000Z
|
2021-10-19T22:56:50.000Z
|
autotest/gcore/cog.py
|
vincentschut/gdal
|
91daf084c245b31a2d6d06fb4b12e2a3229effa1
|
[
"MIT"
] | 2
|
2019-12-20T07:43:13.000Z
|
2021-04-16T22:45:29.000Z
|
#!/usr/bin/env pytest
# -*- coding: utf-8 -*-
###############################################################################
# $Id$
#
# Project: GDAL/OGR Test Suite
# Purpose: COG driver testing
# Author: Even Rouault <even.rouault at spatialys.com>
#
###############################################################################
# Copyright (c) 2019, Even Rouault <even.rouault at spatialys.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import pytest
import struct
import sys
from osgeo import gdal
from osgeo import osr
import gdaltest
from test_py_scripts import samples_path
###############################################################################
def _check_cog(filename):
path = samples_path
if path not in sys.path:
sys.path.append(path)
import validate_cloud_optimized_geotiff
try:
_, errors, _ = validate_cloud_optimized_geotiff.validate(filename, full_check=True)
assert not errors, 'validate_cloud_optimized_geotiff failed'
except OSError:
pytest.fail('validate_cloud_optimized_geotiff failed')
###############################################################################
def check_libtiff_internal_or_at_least(expected_maj, expected_min, expected_micro):
md = gdal.GetDriverByName('GTiff').GetMetadata()
if md['LIBTIFF'] == 'INTERNAL':
return True
if md['LIBTIFF'].startswith('LIBTIFF, Version '):
version = md['LIBTIFF'][len('LIBTIFF, Version '):]
version = version[0:version.find('\n')]
got_maj, got_min, got_micro = version.split('.')
got_maj = int(got_maj)
got_min = int(got_min)
got_micro = int(got_micro)
if got_maj > expected_maj:
return True
if got_maj < expected_maj:
return False
if got_min > expected_min:
return True
if got_min < expected_min:
return False
return got_micro >= expected_micro
return False
###############################################################################
# Basic test
def test_cog_basic():
tab = [ 0 ]
def my_cbk(pct, _, arg):
assert pct >= tab[0]
tab[0] = pct
return 1
filename = '/vsimem/cog.tif'
src_ds = gdal.Open('data/byte.tif')
assert src_ds.GetMetadataItem('GDAL_STRUCTURAL_METADATA', 'TIFF') is None
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
callback = my_cbk,
callback_data = tab)
src_ds = None
assert tab[0] == 1.0
assert ds
ds = None
ds = gdal.Open(filename)
assert ds.GetRasterBand(1).Checksum() == 4672
assert ds.GetMetadataItem('LAYOUT', 'IMAGE_STRUCTURE') == 'COG'
assert ds.GetMetadataItem('COMPRESSION', 'IMAGE_STRUCTURE') is None
assert ds.GetRasterBand(1).GetOverviewCount() == 0
assert ds.GetRasterBand(1).GetBlockSize() == [512, 512]
assert ds.GetMetadataItem('GDAL_STRUCTURAL_METADATA', 'TIFF') == """GDAL_STRUCTURAL_METADATA_SIZE=000140 bytes
LAYOUT=IFDS_BEFORE_DATA
BLOCK_ORDER=ROW_MAJOR
BLOCK_LEADER=SIZE_AS_UINT4
BLOCK_TRAILER=LAST_4_BYTES_REPEATED
KNOWN_INCOMPATIBLE_EDITION=NO
"""
ds = None
_check_cog(filename)
gdal.GetDriverByName('GTiff').Delete(filename)
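# Outside the test harness the same COG can be produced in a single call, e.g.
# (a sketch, output path arbitrary):
#   gdal.Translate('/vsimem/example_cog.tif', 'data/byte.tif', format='COG')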
###############################################################################
# Test creation options
def test_cog_creation_options():
filename = '/vsimem/cog.tif'
src_ds = gdal.Open('data/byte.tif')
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['COMPRESS=DEFLATE',
'LEVEL=1',
'NUM_THREADS=2'])
assert ds
ds = None
ds = gdal.Open(filename)
assert ds.GetRasterBand(1).Checksum() == 4672
assert ds.GetMetadataItem('COMPRESSION', 'IMAGE_STRUCTURE') == 'DEFLATE'
assert ds.GetMetadataItem('PREDICTOR', 'IMAGE_STRUCTURE') is None
ds = None
filesize = gdal.VSIStatL(filename).size
_check_cog(filename)
gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['COMPRESS=DEFLATE',
'BIGTIFF=YES',
'LEVEL=1'])
assert gdal.VSIStatL(filename).size != filesize
gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['COMPRESS=DEFLATE',
'PREDICTOR=YES',
'LEVEL=1'])
assert gdal.VSIStatL(filename).size != filesize
ds = gdal.Open(filename)
assert ds.GetMetadataItem('PREDICTOR', 'IMAGE_STRUCTURE') == '2'
ds = None
gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['COMPRESS=DEFLATE',
'LEVEL=9'])
assert gdal.VSIStatL(filename).size < filesize
colist = gdal.GetDriverByName('COG').GetMetadataItem('DMD_CREATIONOPTIONLIST')
if '<Value>ZSTD' in colist:
gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['COMPRESS=ZSTD'])
ds = gdal.Open(filename)
assert ds.GetMetadataItem('COMPRESSION', 'IMAGE_STRUCTURE') == 'ZSTD'
ds = None
if '<Value>WEBP' in colist:
with gdaltest.error_handler():
assert not gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['COMPRESS=WEBP'])
if '<Value>LERC' in colist:
assert gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['COMPRESS=LERC'])
filesize_no_z_error = gdal.VSIStatL(filename).size
assert gdal.VSIStatL(filename).size != filesize
assert gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['COMPRESS=LERC', 'MAX_Z_ERROR=10'])
filesize_with_z_error = gdal.VSIStatL(filename).size
assert filesize_with_z_error < filesize_no_z_error
assert gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['COMPRESS=LERC_DEFLATE'])
filesize_lerc_deflate = gdal.VSIStatL(filename).size
assert filesize_lerc_deflate < filesize_no_z_error
assert gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['COMPRESS=LERC_DEFLATE', 'LEVEL=1'])
filesize_lerc_deflate_level_1 = gdal.VSIStatL(filename).size
assert filesize_lerc_deflate_level_1 > filesize_lerc_deflate
if '<Value>ZSTD' in colist:
assert gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['COMPRESS=LERC_ZSTD'])
filesize_lerc_zstd = gdal.VSIStatL(filename).size
assert filesize_lerc_zstd < filesize_no_z_error
assert gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['COMPRESS=LERC_ZSTD', 'LEVEL=1'])
filesize_lerc_zstd_level_1 = gdal.VSIStatL(filename).size
assert filesize_lerc_zstd_level_1 > filesize_lerc_zstd
src_ds = None
gdal.GetDriverByName('GTiff').Delete(filename)
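# The same creation options can also be passed through gdal.Translate, e.g. (sketch):
#   gdal.Translate('/vsimem/example_cog.tif', 'data/byte.tif', format='COG',
#                  creationOptions=['COMPRESS=DEFLATE', 'LEVEL=9', 'NUM_THREADS=2'])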
###############################################################################
# Test creation of overviews
def test_cog_creation_of_overviews():
tab = [ 0 ]
def my_cbk(pct, _, arg):
assert pct >= tab[0]
tab[0] = pct
return 1
directory = '/vsimem/test_cog_creation_of_overviews'
filename = directory + '/cog.tif'
src_ds = gdal.Translate('', 'data/byte.tif',
options='-of MEM -outsize 2048 300')
with gdaltest.config_option('GDAL_TIFF_INTERNAL_MASK', 'YES'):
check_filename = '/vsimem/tmp.tif'
ds = gdal.GetDriverByName('GTiff').CreateCopy(check_filename, src_ds,
options = ['TILED=YES'])
ds.BuildOverviews('CUBIC', [2, 4])
cs1 = ds.GetRasterBand(1).GetOverview(0).Checksum()
cs2 = ds.GetRasterBand(1).GetOverview(1).Checksum()
ds = None
gdal.Unlink(check_filename)
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
callback = my_cbk,
callback_data = tab)
assert tab[0] == 1.0
assert ds
assert len(gdal.ReadDir(directory)) == 1 # check that the temp file has gone away
ds = None
ds = gdal.Open(filename)
assert ds.GetRasterBand(1).Checksum() == src_ds.GetRasterBand(1).Checksum()
assert ds.GetRasterBand(1).GetOverviewCount() == 2
assert ds.GetRasterBand(1).GetOverview(0).Checksum() == cs1
assert ds.GetRasterBand(1).GetOverview(1).Checksum() == cs2
ds = None
_check_cog(filename)
src_ds = None
gdal.GetDriverByName('GTiff').Delete(filename)
gdal.Unlink(directory)
###############################################################################
# Test creation of overviews with a different compression method
def test_cog_creation_of_overviews_with_compression():
directory = '/vsimem/test_cog_creation_of_overviews_with_compression'
filename = directory + '/cog.tif'
src_ds = gdal.Translate('', 'data/byte.tif',
options='-of MEM -outsize 2048 300')
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['COMPRESS=LZW', 'OVERVIEW_COMPRESS=JPEG', 'OVERVIEW_QUALITY=50'])
assert ds.GetRasterBand(1).GetOverviewCount() == 2
assert ds.GetMetadata('IMAGE_STRUCTURE')['COMPRESSION'] == 'LZW'
ds_overview_a = gdal.Open('GTIFF_DIR:2:' + filename)
assert ds_overview_a.GetMetadata('IMAGE_STRUCTURE')['COMPRESSION'] == 'JPEG'
assert ds_overview_a.GetMetadata('IMAGE_STRUCTURE')['JPEG_QUALITY'] == '50'
ds_overview_b = gdal.Open('GTIFF_DIR:3:' + filename)
assert ds_overview_b.GetMetadata('IMAGE_STRUCTURE')['COMPRESSION'] == 'JPEG'
assert ds_overview_a.GetMetadata('IMAGE_STRUCTURE')['JPEG_QUALITY'] == '50'
ds_overview_a = None
ds_overview_b = None
ds = None
src_ds = None
gdal.GetDriverByName('GTiff').Delete(filename)
gdal.Unlink(directory)
###############################################################################
# Test creation of overviews with a dataset with a mask
def test_cog_creation_of_overviews_with_mask():
tab = [ 0 ]
def my_cbk(pct, _, arg):
assert pct >= tab[0]
tab[0] = pct
return 1
directory = '/vsimem/test_cog_creation_of_overviews_with_mask'
gdal.Mkdir(directory, 0o755)
filename = directory + '/cog.tif'
src_ds = gdal.Translate('', 'data/byte.tif',
options='-of MEM -outsize 2048 300')
src_ds.CreateMaskBand(gdal.GMF_PER_DATASET)
src_ds.GetRasterBand(1).GetMaskBand().WriteRaster(0, 0, 1024, 300, b'\xFF',
buf_xsize = 1, buf_ysize = 1)
with gdaltest.config_option('GDAL_TIFF_INTERNAL_MASK', 'YES'):
check_filename = '/vsimem/tmp.tif'
ds = gdal.GetDriverByName('GTiff').CreateCopy(check_filename, src_ds,
options = ['TILED=YES'])
ds.BuildOverviews('CUBIC', [2, 4])
cs1 = ds.GetRasterBand(1).GetOverview(0).Checksum()
cs2 = ds.GetRasterBand(1).GetOverview(1).Checksum()
ds = None
gdal.Unlink(check_filename)
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
callback = my_cbk,
callback_data = tab)
assert tab[0] == 1.0
assert ds
assert len(gdal.ReadDir(directory)) == 1 # check that the temp file has gone away
ds = None
ds = gdal.Open(filename)
assert ds.GetRasterBand(1).Checksum() == src_ds.GetRasterBand(1).Checksum()
assert ds.GetRasterBand(1).GetOverviewCount() == 2
assert ds.GetRasterBand(1).GetOverview(0).GetBlockSize() == [512, 512]
assert ds.GetRasterBand(1).GetOverview(0).Checksum() == cs1
assert ds.GetRasterBand(1).GetOverview(1).Checksum() == cs2
ds = None
_check_cog(filename)
src_ds = None
gdal.GetDriverByName('GTiff').Delete(filename)
gdal.Unlink(directory)
###############################################################################
# Test full world reprojection to WebMercator
def test_cog_small_world_to_web_mercator():
tab = [ 0 ]
def my_cbk(pct, _, arg):
assert pct >= tab[0]
tab[0] = pct
return 1
directory = '/vsimem/test_cog_small_world_to_web_mercator'
gdal.Mkdir(directory, 0o755)
filename = directory + '/cog.tif'
src_ds = gdal.Open('../gdrivers/data/small_world.tif')
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['TILING_SCHEME=GoogleMapsCompatible', 'COMPRESS=JPEG'],
callback = my_cbk,
callback_data = tab)
assert tab[0] == 1.0
assert ds
assert len(gdal.ReadDir(directory)) == 1 # check that the temp file has gone away
ds = None
ds = gdal.Open(filename)
assert ds.RasterCount == 3
assert ds.RasterXSize == 256
assert ds.RasterYSize == 256
assert ds.GetRasterBand(1).GetMaskFlags() == gdal.GMF_PER_DATASET
assert ds.GetRasterBand(1).GetBlockSize() == [256, 256]
gt = ds.GetGeoTransform()
expected_gt = [-20037508.342789248, 156543.033928041, 0.0,
20037508.342789248, 0.0, -156543.033928041]
for i in range(6):
if gt[i] != pytest.approx(expected_gt[i], abs=1e-10 * abs(expected_gt[i])):
assert False, gt
got_cs = [ds.GetRasterBand(i+1).Checksum() for i in range(3)]
if sys.platform == 'darwin' and gdal.GetConfigOption('TRAVIS', None) is not None:
assert got_cs != [0, 0, 0]
else:
assert got_cs == [26293, 23439, 14955]
assert ds.GetRasterBand(1).GetMaskBand().Checksum() == 17849
assert ds.GetRasterBand(1).GetOverviewCount() == 0
ds = None
_check_cog(filename)
src_ds = None
gdal.GetDriverByName('GTiff').Delete(filename)
gdal.Unlink(directory)
###############################################################################
# Test reprojection of small extent to WebMercator
def test_cog_byte_to_web_mercator():
tab = [ 0 ]
def my_cbk(pct, _, arg):
assert pct >= tab[0]
tab[0] = pct
return 1
directory = '/vsimem/test_cog_byte_to_web_mercator'
gdal.Mkdir(directory, 0o755)
filename = directory + '/cog.tif'
src_ds = gdal.Open('data/byte.tif')
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['TILING_SCHEME=GoogleMapsCompatible', 'ALIGNED_LEVELS=3'],
callback = my_cbk,
callback_data = tab)
assert tab[0] == 1.0
assert ds
assert len(gdal.ReadDir(directory)) == 1 # check that the temp file has gone away
ds = None
ds = gdal.Open(filename)
assert ds.RasterCount == 2
assert ds.RasterXSize == 1024
assert ds.RasterYSize == 1024
assert ds.GetRasterBand(1).GetMaskFlags() == gdal.GMF_ALPHA + gdal.GMF_PER_DATASET
assert ds.GetRasterBand(1).GetBlockSize() == [256,256]
gt = ds.GetGeoTransform()
expected_gt = [-13149614.849955443, 76.43702828517598, 0.0,
4070118.8821290657, 0.0, -76.43702828517598]
for i in range(6):
if gt[i] != pytest.approx(expected_gt[i], abs=1e-10 * abs(expected_gt[i])):
assert False, gt
assert ds.GetRasterBand(1).Checksum() in (4363, 4264, 4362) # 4264 on Mac , 4362 on Mac / Conda
assert ds.GetRasterBand(1).GetMaskBand().Checksum() == 4356
assert ds.GetRasterBand(1).GetOverviewCount() == 2
ds = None
_check_cog(filename)
# Use our generated COG as the input of the same COG generation: reprojection
# should be skipped
filename2 = directory + '/cog2.tif'
src_ds = gdal.Open(filename)
class my_error_handler(object):
def __init__(self):
self.debug_msg_list = []
self.other_msg_list = []
def handler(self, eErrClass, err_no, msg):
if eErrClass == gdal.CE_Debug:
self.debug_msg_list.append(msg)
else:
self.other_msg_list.append(msg)
handler = my_error_handler()
try:
gdal.PushErrorHandler(handler.handler)
gdal.SetCurrentErrorHandlerCatchDebug(True)
with gdaltest.config_option('CPL_DEBUG', 'COG'):
ds = gdal.GetDriverByName('COG').CreateCopy(filename2, src_ds,
options = ['TILING_SCHEME=GoogleMapsCompatible', 'ALIGNED_LEVELS=3'])
finally:
gdal.PopErrorHandler()
assert ds
assert 'COG: Skipping reprojection step: source dataset matches reprojection specifications' in handler.debug_msg_list
assert handler.other_msg_list == []
src_ds = None
ds = None
# Cleanup
gdal.GetDriverByName('GTiff').Delete(filename)
gdal.GetDriverByName('GTiff').Delete(filename2)
gdal.Unlink(directory)
###############################################################################
# Same as previous test case but with other input options
def test_cog_byte_to_web_mercator_manual():
directory = '/vsimem/test_cog_byte_to_web_mercator_manual'
gdal.Mkdir(directory, 0o755)
filename = directory + '/cog.tif'
src_ds = gdal.Open('data/byte.tif')
res = 76.43702828517598
minx = -13149614.849955443
maxx = minx + 1024 * res
maxy = 4070118.8821290657
miny = maxy - 1024 * res
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['BLOCKSIZE=256',
'TARGET_SRS=EPSG:3857',
'RES=%.18g' % res,
'EXTENT=%.18g,%.18g,%.18g,%.18g' % (minx,miny,maxx,maxy)])
assert ds
ds = None
ds = gdal.Open(filename)
assert ds.RasterCount == 2
assert ds.RasterXSize == 1024
assert ds.RasterYSize == 1024
assert ds.GetRasterBand(1).GetMaskFlags() == gdal.GMF_ALPHA + gdal.GMF_PER_DATASET
assert ds.GetRasterBand(1).GetBlockSize() == [256,256]
gt = ds.GetGeoTransform()
expected_gt = [-13149614.849955443, 76.43702828517598, 0.0,
4070118.8821290657, 0.0, -76.43702828517598]
for i in range(6):
if gt[i] != pytest.approx(expected_gt[i], abs=1e-10 * abs(expected_gt[i])):
assert False, gt
assert ds.GetRasterBand(1).Checksum() in (4363, 4264, 4362) # 4264 on Mac , 4362 on Mac / Conda
assert ds.GetRasterBand(1).GetMaskBand().Checksum() == 4356
assert ds.GetRasterBand(1).GetOverviewCount() == 2
ds = None
src_ds = None
gdal.GetDriverByName('GTiff').Delete(filename)
gdal.Unlink(directory)
###############################################################################
# Test OVERVIEWS creation option
def test_cog_overviews_co():
def my_cbk(pct, _, arg):
assert pct >= tab[0]
tab[0] = pct
return 1
directory = '/vsimem/test_cog_overviews_co'
filename = directory + '/cog.tif'
src_ds = gdal.Translate('', 'data/byte.tif',
options='-of MEM -outsize 2048 300')
for val in ['NONE', 'FORCE_USE_EXISTING']:
tab = [ 0 ]
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['OVERVIEWS=' + val],
callback = my_cbk,
callback_data = tab)
assert tab[0] == 1.0
assert ds
ds = None
ds = gdal.Open(filename)
assert ds.GetRasterBand(1).Checksum() == src_ds.GetRasterBand(1).Checksum()
assert ds.GetRasterBand(1).GetOverviewCount() == 0
ds = None
_check_cog(filename)
for val in ['AUTO', 'IGNORE_EXISTING']:
tab = [ 0 ]
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['OVERVIEWS=' + val],
callback = my_cbk,
callback_data = tab)
assert tab[0] == 1.0
assert ds
ds = None
ds = gdal.Open(filename)
assert ds.GetRasterBand(1).Checksum() == src_ds.GetRasterBand(1).Checksum()
assert ds.GetRasterBand(1).GetOverviewCount() == 2
assert ds.GetRasterBand(1).GetOverview(0).Checksum() != 0
ds = None
_check_cog(filename)
# Add overviews to source
src_ds.BuildOverviews('NONE', [2])
tab = [ 0 ]
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['OVERVIEWS=NONE'],
callback = my_cbk,
callback_data = tab)
assert tab[0] == 1.0
assert ds
ds = None
ds = gdal.Open(filename)
assert ds.GetRasterBand(1).Checksum() == src_ds.GetRasterBand(1).Checksum()
assert ds.GetRasterBand(1).GetOverviewCount() == 0
ds = None
_check_cog(filename)
tab = [ 0 ]
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['OVERVIEWS=FORCE_USE_EXISTING'],
callback = my_cbk,
callback_data = tab)
assert tab[0] == 1.0
assert ds
ds = None
ds = gdal.Open(filename)
assert ds.GetRasterBand(1).Checksum() == src_ds.GetRasterBand(1).Checksum()
assert ds.GetRasterBand(1).GetOverviewCount() == 1
assert ds.GetRasterBand(1).GetOverview(0).Checksum() == 0
ds = None
_check_cog(filename)
tab = [ 0 ]
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['OVERVIEWS=IGNORE_EXISTING'],
callback = my_cbk,
callback_data = tab)
assert tab[0] == 1.0
assert ds
ds = None
ds = gdal.Open(filename)
assert ds.GetRasterBand(1).Checksum() == src_ds.GetRasterBand(1).Checksum()
assert ds.GetRasterBand(1).GetOverviewCount() == 2
assert ds.GetRasterBand(1).GetOverview(0).Checksum() != 0
ds = None
_check_cog(filename)
src_ds = None
gdal.GetDriverByName('GTiff').Delete(filename)
gdal.Unlink(directory)
###############################################################################
# Test editing and invalidating a COG file
def test_cog_invalidation_by_data_change():
filename = '/vsimem/cog.tif'
src_ds = gdal.GetDriverByName('MEM').Create('', 100, 100)
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['COMPRESS=DEFLATE'])
ds = None
ds = gdal.Open(filename, gdal.GA_Update)
assert ds.GetMetadataItem('LAYOUT', 'IMAGE_STRUCTURE') == 'COG'
src_ds = gdal.Open('data/byte.tif')
data = src_ds.ReadRaster()
ds.GetRasterBand(1).WriteRaster(0, 0, 20, 20, data)
with gdaltest.error_handler():
ds.FlushCache()
ds = None
with gdaltest.error_handler():
ds = gdal.Open(filename)
assert ds.GetMetadataItem('LAYOUT', 'IMAGE_STRUCTURE') is None
ds = None
with pytest.raises(AssertionError, match='KNOWN_INCOMPATIBLE_EDITION=YES is declared in the file'):
_check_cog(filename)
with gdaltest.error_handler():
gdal.GetDriverByName('GTiff').Delete(filename)
###############################################################################
# Test editing and invalidating a COG file
def test_cog_invalidation_by_metadata_change():
filename = '/vsimem/cog.tif'
src_ds = gdal.GetDriverByName('MEM').Create('', 1, 1)
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['COMPRESS=DEFLATE'])
ds = None
ds = gdal.Open(filename, gdal.GA_Update)
ds.GetRasterBand(1).ComputeStatistics(False)
ds = None
with gdaltest.error_handler():
ds = gdal.Open(filename)
assert ds.GetMetadataItem('LAYOUT', 'IMAGE_STRUCTURE') is None
ds = None
with gdaltest.error_handler():
gdal.GetDriverByName('GTiff').Delete(filename)
###############################################################################
# Test a tiling scheme with a CRS with northing/easting axis order
# and non power-of-two ratios of scales.
def test_cog_northing_easting_and_non_power_of_two_ratios():
filename = '/vsimem/cog.tif'
x0_NZTM2000 = -1000000
y0_NZTM2000 = 10000000
blocksize = 256
scale_denom_zoom_level_14 = 1000
scale_denom_zoom_level_13 = 2500
scale_denom_zoom_level_12 = 5000
ds = gdal.Translate(filename, 'data/byte.tif',
options='-of COG -a_srs EPSG:2193 -a_ullr 1000001 5000001 1000006.6 4999995.4 -co TILING_SCHEME=NZTM2000 -co ALIGNED_LEVELS=2')
assert ds.RasterXSize == 1280
assert ds.RasterYSize == 1280
b = ds.GetRasterBand(1)
assert [(b.GetOverview(i).XSize, b.GetOverview(i).YSize) for i in range(b.GetOverviewCount())] == [(512, 512), (256, 256)]
gt = ds.GetGeoTransform()
res_zoom_level_14 = scale_denom_zoom_level_14 * 0.28e-3 # According to OGC Tile Matrix Set formula
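# e.g. at zoom level 14: res = 1000 * 0.28e-3 = 0.28 m/pixel (0.28 mm standardized pixel size)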
assert gt == pytest.approx((999872, res_zoom_level_14, 0, 5000320, 0, -res_zoom_level_14), abs=1e-8)
# Check that gt origin matches the corner of a tile at zoom 14
res = gt[1]
tile_x = (gt[0] - x0_NZTM2000) / (blocksize * res)
assert tile_x == pytest.approx(round(tile_x))
tile_y = (y0_NZTM2000 - gt[3]) / (blocksize * res)
assert tile_y == pytest.approx(round(tile_y))
# Check that overview=0 corresponds to the resolution of zoom level=13 / OGC ScaleDenom = 2500
ovr0_xsize = b.GetOverview(0).XSize
assert float(ovr0_xsize) / ds.RasterXSize == float(scale_denom_zoom_level_14) / scale_denom_zoom_level_13
# Check that gt origin matches the corner of a tile at zoom 13
ovr0_res = res * scale_denom_zoom_level_13 / scale_denom_zoom_level_14
tile_x = (gt[0] - x0_NZTM2000) / (blocksize * ovr0_res)
assert tile_x == pytest.approx(round(tile_x))
tile_y = (y0_NZTM2000 - gt[3]) / (blocksize * ovr0_res)
assert tile_y == pytest.approx(round(tile_y))
# Check that overview=1 corresponds to the resolution of zoom level=12 / OGC ScaleDenom = 5000
ovr1_xsize = b.GetOverview(1).XSize
assert float(ovr1_xsize) / ds.RasterXSize == float(scale_denom_zoom_level_14) / scale_denom_zoom_level_12
# Check that gt origin matches the corner of a tile at zoom 12
ovr1_res = res * scale_denom_zoom_level_12 / scale_denom_zoom_level_14
tile_x = (gt[0] - x0_NZTM2000) / (blocksize * ovr1_res)
assert tile_x == pytest.approx(round(tile_x))
tile_y = (y0_NZTM2000 - gt[3]) / (blocksize * ovr1_res)
assert tile_y == pytest.approx(round(tile_y))
assert ds.GetMetadata("TILING_SCHEME") == {
"NAME": "NZTM2000",
"ZOOM_LEVEL": "14",
"ALIGNED_LEVELS": "2"
}
ds = None
gdal.GetDriverByName('GTiff').Delete(filename)
###############################################################################
# Test SPARSE_OK=YES
def test_cog_sparse():
filename = '/vsimem/cog.tif'
src_ds = gdal.GetDriverByName('MEM').Create('', 512, 512)
src_ds.GetRasterBand(1).Fill(255)
src_ds.WriteRaster(0, 0, 256, 256, '\x00' * 256 * 256)
src_ds.WriteRaster(256, 256, 128, 128, '\x00' * 128 * 128)
src_ds.BuildOverviews('NEAREST', [2])
gdal.GetDriverByName('COG').CreateCopy(filename, src_ds, options = ['BLOCKSIZE=128', 'SPARSE_OK=YES', 'COMPRESS=LZW'])
_check_cog(filename)
with gdaltest.config_option('GTIFF_HAS_OPTIMIZED_READ_MULTI_RANGE', 'YES'):
ds = gdal.Open(filename)
assert ds.GetRasterBand(1).GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF') is None
assert ds.GetRasterBand(1).GetMetadataItem('BLOCK_OFFSET_1_0', 'TIFF') is None
assert ds.GetRasterBand(1).GetMetadataItem('BLOCK_OFFSET_2_0', 'TIFF') is not None
assert ds.GetRasterBand(1).GetOverview(0).GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF') is None
assert ds.GetRasterBand(1).GetOverview(0).GetMetadataItem('BLOCK_OFFSET_1_0', 'TIFF') is not None
assert ds.GetRasterBand(1).ReadRaster(0, 0, 512, 512) == src_ds.GetRasterBand(1).ReadRaster(0, 0, 512, 512)
assert ds.GetRasterBand(1).GetOverview(0).ReadRaster(0, 0, 256, 256) == src_ds.GetRasterBand(1).GetOverview(0).ReadRaster(0, 0, 256, 256)
if check_libtiff_internal_or_at_least(4, 0, 11):
# This file is the same as the one generated above, except that we have,
# with an hex editor, zeroify all entries of TileByteCounts except the
# last tile of the main IFD, and for a tile when the next tile is sparse
ds = gdal.Open('data/cog_sparse_strile_arrays_zeroified_when_possible.tif')
assert ds.GetRasterBand(1).ReadRaster(0, 0, 512, 512) == src_ds.GetRasterBand(1).ReadRaster(0, 0, 512, 512)
ds = None
gdal.Unlink(filename)
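# Equivalent creation from the command line (a sketch):
#   gdal_translate -of COG -co SPARSE_OK=YES -co BLOCKSIZE=128 -co COMPRESS=LZW in.tif out.tif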
###############################################################################
# Test SPARSE_OK=YES with mask
def test_cog_sparse_mask():
filename = '/vsimem/cog.tif'
src_ds = gdal.GetDriverByName('MEM').Create('', 512, 512, 4)
for i in range(4):
src_ds.GetRasterBand(i+1).SetColorInterpretation(gdal.GCI_RedBand + i)
src_ds.GetRasterBand(i+1).Fill(255)
src_ds.GetRasterBand(i+1).WriteRaster(0, 0, 256, 256, '\x00' * 256 * 256)
src_ds.GetRasterBand(i+1).WriteRaster(256, 256, 128, 128, '\x00' * 128 * 128)
src_ds.BuildOverviews('NEAREST', [2])
gdal.GetDriverByName('COG').CreateCopy(filename, src_ds, options = ['BLOCKSIZE=128', 'SPARSE_OK=YES', 'COMPRESS=JPEG', 'RESAMPLING=NEAREST'])
_check_cog(filename)
with gdaltest.config_option('GTIFF_HAS_OPTIMIZED_READ_MULTI_RANGE', 'YES'):
ds = gdal.Open(filename)
assert ds.GetRasterBand(1).GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF') is None
assert ds.GetRasterBand(1).GetMetadataItem('BLOCK_OFFSET_1_0', 'TIFF') is None
assert ds.GetRasterBand(1).GetMetadataItem('BLOCK_OFFSET_2_0', 'TIFF') is not None
assert ds.GetRasterBand(1).GetMaskBand().GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF') is None
assert ds.GetRasterBand(1).GetMaskBand().GetMetadataItem('BLOCK_OFFSET_1_0', 'TIFF') is None
assert ds.GetRasterBand(1).GetMaskBand().GetMetadataItem('BLOCK_OFFSET_2_0', 'TIFF') is not None
assert ds.GetRasterBand(1).GetOverview(0).GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF') is None
assert ds.GetRasterBand(1).GetOverview(0).GetMetadataItem('BLOCK_OFFSET_1_0', 'TIFF') is not None
assert ds.GetRasterBand(1).GetOverview(0).GetMaskBand().GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF') is None
assert ds.GetRasterBand(1).GetOverview(0).GetMaskBand().GetMetadataItem('BLOCK_OFFSET_1_0', 'TIFF') is not None
assert ds.GetRasterBand(1).ReadRaster(0, 0, 512, 512) == src_ds.GetRasterBand(1).ReadRaster(0, 0, 512, 512)
assert ds.GetRasterBand(1).GetMaskBand().ReadRaster(0, 0, 512, 512) == src_ds.GetRasterBand(4).ReadRaster(0, 0, 512, 512)
assert ds.GetRasterBand(1).GetOverview(0).ReadRaster(0, 0, 256, 256) == src_ds.GetRasterBand(1).GetOverview(0).ReadRaster(0, 0, 256, 256)
assert ds.GetRasterBand(1).GetOverview(0).GetMaskBand().ReadRaster(0, 0, 256, 256) == src_ds.GetRasterBand(4).GetOverview(0).ReadRaster(0, 0, 256, 256)
ds = None
gdal.Unlink(filename)
###############################################################################
# Test SPARSE_OK=YES with imagery at 0 and mask at 255
def test_cog_sparse_imagery_0_mask_255():
filename = '/vsimem/cog.tif'
src_ds = gdal.GetDriverByName('MEM').Create('', 512, 512, 4)
for i in range(4):
src_ds.GetRasterBand(i+1).SetColorInterpretation(gdal.GCI_RedBand + i)
src_ds.GetRasterBand(i+1).Fill(0 if i < 3 else 255)
src_ds.BuildOverviews('NEAREST', [2])
gdal.GetDriverByName('COG').CreateCopy(filename, src_ds, options = ['BLOCKSIZE=128', 'SPARSE_OK=YES', 'COMPRESS=JPEG'])
_check_cog(filename)
with gdaltest.config_option('GTIFF_HAS_OPTIMIZED_READ_MULTI_RANGE', 'YES'):
ds = gdal.Open(filename)
assert ds.GetRasterBand(1).GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF') is None
assert ds.GetRasterBand(1).GetMaskBand().GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF') is not None
assert ds.GetRasterBand(1).GetOverview(0).GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF') is None
assert ds.GetRasterBand(1).GetOverview(0).GetMaskBand().GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF') is not None
assert ds.GetRasterBand(1).ReadRaster(0, 0, 512, 512) == src_ds.GetRasterBand(1).ReadRaster(0, 0, 512, 512)
assert ds.GetRasterBand(1).GetMaskBand().ReadRaster(0, 0, 512, 512) == src_ds.GetRasterBand(4).ReadRaster(0, 0, 512, 512)
assert ds.GetRasterBand(1).GetOverview(0).ReadRaster(0, 0, 256, 256) == src_ds.GetRasterBand(1).GetOverview(0).ReadRaster(0, 0, 256, 256)
assert ds.GetRasterBand(1).GetOverview(0).GetMaskBand().ReadRaster(0, 0, 256, 256) == src_ds.GetRasterBand(4).GetOverview(0).ReadRaster(0, 0, 256, 256)
ds = None
gdal.Unlink(filename)
###############################################################################
# Test SPARSE_OK=YES with imagery at 0 or 255 and mask at 255
def test_cog_sparse_imagery_0_or_255_mask_255():
filename = '/vsimem/cog.tif'
src_ds = gdal.GetDriverByName('MEM').Create('', 512, 512, 4)
for i in range(4):
src_ds.GetRasterBand(i+1).SetColorInterpretation(gdal.GCI_RedBand + i)
for i in range(3):
src_ds.GetRasterBand(i+1).Fill(255)
src_ds.GetRasterBand(i+1).WriteRaster(0, 0, 256, 256, '\x00' * 256 * 256)
src_ds.GetRasterBand(i+1).WriteRaster(256, 256, 128, 128, '\x00' * 128 * 128)
src_ds.GetRasterBand(4).Fill(255)
src_ds.BuildOverviews('NEAREST', [2])
gdal.GetDriverByName('COG').CreateCopy(filename, src_ds, options = ['BLOCKSIZE=128', 'SPARSE_OK=YES', 'COMPRESS=JPEG', 'RESAMPLING=NEAREST'])
_check_cog(filename)
with gdaltest.config_option('GTIFF_HAS_OPTIMIZED_READ_MULTI_RANGE', 'YES'):
ds = gdal.Open(filename)
assert ds.GetRasterBand(1).GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF') is None
assert ds.GetRasterBand(1).GetMetadataItem('BLOCK_OFFSET_2_0', 'TIFF') is not None
assert ds.GetRasterBand(1).GetMaskBand().GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF') is not None
assert ds.GetRasterBand(1).GetOverview(0).GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF') is None
assert ds.GetRasterBand(1).GetOverview(0).GetMaskBand().GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF') is not None
assert ds.GetRasterBand(1).ReadRaster(0, 0, 512, 512) == src_ds.GetRasterBand(1).ReadRaster(0, 0, 512, 512)
assert ds.GetRasterBand(1).GetMaskBand().ReadRaster(0, 0, 512, 512) == src_ds.GetRasterBand(4).ReadRaster(0, 0, 512, 512)
assert ds.GetRasterBand(1).GetOverview(0).ReadRaster(0, 0, 256, 256) == src_ds.GetRasterBand(1).GetOverview(0).ReadRaster(0, 0, 256, 256)
assert ds.GetRasterBand(1).GetOverview(0).GetMaskBand().ReadRaster(0, 0, 256, 256) == src_ds.GetRasterBand(4).GetOverview(0).ReadRaster(0, 0, 256, 256)
ds = None
gdal.Unlink(filename)
###############################################################################
# Test SPARSE_OK=YES with imagery and mask at 0
def test_cog_sparse_imagery_mask_0():
filename = '/vsimem/cog.tif'
src_ds = gdal.GetDriverByName('MEM').Create('', 512, 512, 4)
for i in range(4):
src_ds.GetRasterBand(i+1).SetColorInterpretation(gdal.GCI_RedBand + i)
src_ds.GetRasterBand(i+1).Fill(0)
src_ds.BuildOverviews('NEAREST', [2])
gdal.GetDriverByName('COG').CreateCopy(filename, src_ds, options = ['BLOCKSIZE=128', 'SPARSE_OK=YES', 'COMPRESS=JPEG'])
_check_cog(filename)
with gdaltest.config_option('GTIFF_HAS_OPTIMIZED_READ_MULTI_RANGE', 'YES'):
ds = gdal.Open(filename)
assert ds.GetRasterBand(1).GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF') is None
assert ds.GetRasterBand(1).GetMaskBand().GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF') is None
assert ds.GetRasterBand(1).GetOverview(0).GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF') is None
assert ds.GetRasterBand(1).GetOverview(0).GetMaskBand().GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF') is None
assert ds.GetRasterBand(1).ReadRaster(0, 0, 512, 512) == src_ds.GetRasterBand(1).ReadRaster(0, 0, 512, 512)
assert ds.GetRasterBand(1).GetMaskBand().ReadRaster(0, 0, 512, 512) == src_ds.GetRasterBand(4).ReadRaster(0, 0, 512, 512)
assert ds.GetRasterBand(1).GetOverview(0).ReadRaster(0, 0, 256, 256) == src_ds.GetRasterBand(1).GetOverview(0).ReadRaster(0, 0, 256, 256)
assert ds.GetRasterBand(1).GetOverview(0).GetMaskBand().ReadRaster(0, 0, 256, 256) == src_ds.GetRasterBand(4).GetOverview(0).ReadRaster(0, 0, 256, 256)
ds = None
gdal.Unlink(filename)
###############################################################################
# Test ZOOM_LEVEL_STRATEGY option
@pytest.mark.parametrize('zoom_level_strategy,expected_gt',
[('AUTO', (-13110479.09147343, 76.43702828517416, 0.0, 4030983.1236470547, 0.0, -76.43702828517416)),
('LOWER', (-13110479.09147343, 76.43702828517416, 0.0, 4030983.1236470547, 0.0, -76.43702828517416)),
('UPPER', (-13100695.151852928, 38.21851414258708, 0.0, 4021199.1840265524, 0.0, -38.21851414258708))
])
def test_cog_zoom_level_strategy(zoom_level_strategy,expected_gt):
filename = '/vsimem/test_cog_zoom_level_strategy.tif'
src_ds = gdal.Open('data/byte.tif')
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['TILING_SCHEME=GoogleMapsCompatible',
'ZOOM_LEVEL_STRATEGY=' + zoom_level_strategy])
gt = ds.GetGeoTransform()
assert gt == pytest.approx(expected_gt, rel=1e-10)
# Test that the zoom level strategy applied on input data already on a
# zoom level doesn't lead to selecting another zoom level
filename2 = '/vsimem/test_cog_zoom_level_strategy_2.tif'
src_ds = gdal.Open('data/byte.tif')
ds2 = gdal.GetDriverByName('COG').CreateCopy(filename2, ds,
options = ['TILING_SCHEME=GoogleMapsCompatible',
'ZOOM_LEVEL_STRATEGY=' + zoom_level_strategy])
gt = ds2.GetGeoTransform()
assert gt == pytest.approx(expected_gt, rel=1e-10)
ds2 = None
gdal.Unlink(filename2)
ds = None
gdal.Unlink(filename)
###############################################################################
def test_cog_resampling_options():
filename = '/vsimem/test_cog_resampling_options.tif'
src_ds = gdal.Open('data/byte.tif')
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['TILING_SCHEME=GoogleMapsCompatible', 'WARP_RESAMPLING=NEAREST'])
cs1 = ds.GetRasterBand(1).Checksum()
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['TILING_SCHEME=GoogleMapsCompatible', 'WARP_RESAMPLING=CUBIC'])
cs2 = ds.GetRasterBand(1).Checksum()
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['TILING_SCHEME=GoogleMapsCompatible', 'RESAMPLING=NEAREST', 'WARP_RESAMPLING=CUBIC'])
cs3 = ds.GetRasterBand(1).Checksum()
assert cs1 != cs2
assert cs2 == cs3
src_ds = gdal.Translate('', 'data/byte.tif', options='-of MEM -outsize 129 0')
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['BLOCKSIZE=128', 'OVERVIEW_RESAMPLING=NEAREST'])
cs1 = ds.GetRasterBand(1).GetOverview(0).Checksum()
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['BLOCKSIZE=128','OVERVIEW_RESAMPLING=BILINEAR'])
cs2 = ds.GetRasterBand(1).GetOverview(0).Checksum()
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['BLOCKSIZE=128','RESAMPLING=NEAREST', 'OVERVIEW_RESAMPLING=BILINEAR'])
cs3 = ds.GetRasterBand(1).GetOverview(0).Checksum()
assert cs1 != cs2
assert cs2 == cs3
ds = None
gdal.Unlink(filename)
###############################################################################
def test_cog_invalid_warp_resampling():
filename = '/vsimem/test_cog_invalid_warp_resampling.tif'
src_ds = gdal.Open('data/byte.tif')
with gdaltest.error_handler():
assert gdal.GetDriverByName('COG').CreateCopy(filename, src_ds,
options = ['TILING_SCHEME=GoogleMapsCompatible', 'RESAMPLING=INVALID']) is None
gdal.Unlink(filename)
###############################################################################
def test_cog_overview_size():
src_ds = gdal.GetDriverByName('MEM').Create('', 20480 // 4, 40960 // 4)
src_ds.SetGeoTransform([1723840, 7 * 4, 0, 5555840, 0, -7 * 4])
srs = osr.SpatialReference()
srs.ImportFromEPSG(2193)
src_ds.SetProjection(srs.ExportToWkt())
filename = '/vsimem/test_cog_overview_size.tif'
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds, options = ['TILING_SCHEME=NZTM2000', 'ALIGNED_LEVELS=4', 'OVERVIEW_RESAMPLING=NONE'])
assert (ds.RasterXSize, ds.RasterYSize) == (20480 // 4, 40960 // 4)
ovr_size = [ (ds.GetRasterBand(1).GetOverview(i).XSize, ds.GetRasterBand(1).GetOverview(i).YSize) for i in range(ds.GetRasterBand(1).GetOverviewCount()) ]
assert ovr_size == [(2048, 4096), (1024, 2048), (512, 1024), (256, 512), (128, 256)]
gdal.Unlink(filename)
###############################################################################
# Test bugfix for https://github.com/OSGeo/gdal/issues/2946
def test_cog_float32_color_table():
src_ds = gdal.GetDriverByName('MEM').Create('', 1024, 1024, 1, gdal.GDT_Float32)
src_ds.GetRasterBand(1).Fill(1.0)
ct = gdal.ColorTable()
src_ds.GetRasterBand(1).SetColorTable(ct)
filename = '/vsimem/test_cog_float32_color_table.tif'
# Silence warning about color table not being copied
with gdaltest.error_handler():
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds) # segfault
assert ds
assert ds.GetRasterBand(1).GetColorTable() is None
assert struct.unpack('f', ds.ReadRaster(0,0,1,1))[0] == 1.0
assert struct.unpack('f', ds.GetRasterBand(1).GetOverview(0).ReadRaster(0,0,1,1))[0] == 1.0
gdal.Unlink(filename)
###############################################################################
# Test copy XMP
def test_cog_copy_xmp():
filename = '/vsimem/cog_xmp.tif'
src_ds = gdal.Open('../gdrivers/data/gtiff/byte_with_xmp.tif')
ds = gdal.GetDriverByName('COG').CreateCopy(filename, src_ds)
assert ds
ds = None
ds = gdal.Open(filename)
xmp = ds.GetMetadata('xml:XMP')
ds = None
assert 'W5M0MpCehiHzreSzNTczkc9d' in xmp[0], 'Wrong input file without XMP'
_check_cog(filename)
gdal.Unlink(filename)
| 41.910448
| 159
| 0.611801
|
8ac908fac0023aedda966a7be120c41a2f515451
| 17,066
|
py
|
Python
|
iotas/devices/moorescloud/holiday/driver.py
|
cheshrkat/holideck
|
4ee6e11d728ef24c32670e6b15a16de27c8f7406
|
[
"MIT"
] | 2
|
2018-01-03T09:58:00.000Z
|
2019-05-15T07:26:33.000Z
|
iotas/devices/moorescloud/holiday/driver.py
|
cheshrkat/holideck
|
4ee6e11d728ef24c32670e6b15a16de27c8f7406
|
[
"MIT"
] | null | null | null |
iotas/devices/moorescloud/holiday/driver.py
|
cheshrkat/holideck
|
4ee6e11d728ef24c32670e6b15a16de27c8f7406
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
#
"""
Device-specific implementation for Holiday by MooresCloud
Homepage and documentation: http://dev.moorescloud.com/
Copyright (c) 2013, Mark Pesce.
License: MIT (see LICENSE for details)
"""
__author__ = 'Mark Pesce'
__version__ = '1.0b3'
__license__ = 'MIT'
import subprocess, time, os
import requests
import json
from multiprocessing import Queue
from bottle import request, abort
class Holiday:
# def old__init__(self, remote=False, address='sim', name='nameless'):
# self.numleds = 50
# self.leds = [] # Array of LED values. This may actually exist elsewhere eventually.
# self.address = ''
# self.name = name
# self.isSim = False
#
# if remote == False:
# self.remote = False
# if address == 'sim':
# self.pipename = os.path.join(os.path.expanduser('~'), 'pipelights.fifo')
# self.address = address
# else:
# self.pipename = "/run/pipelights.fifo"
# self.address = address
# try:
# self.pipe = open(self.pipename,"wb")
# except:
# print "Couldn't open the pipe, there's gonna be trouble!"
# ln = 0
# else:
# self.address = address
# self.remote = True
#
# for ln in range(self.numleds):
# self.leds.append([0x00, 0x00, 0x00]) # Create and clear an array of RGB LED values
#
# return
def __init__(self, remote=False, address='sim', name='nameless', queue=None):
self.numleds = 50
self.leds = [] # Array of LED values. This may actually exist elsewhere eventually.
self.address = ''
self.name = name
self.isSim = False
self.inDevMode = False
self.device_type = 'moorescloud.holiday'
self.appbase = '/home/holiday/bin/apps' # When turning apps on and off, go here for them.
# Using the new compositor 'compose' if True
self.compose = True
if (self.compose == True):
self.pid = os.getpid() # Must pass PID to compose
else:
self.pid = None
if remote == False:
self.remote = False
if address == 'sim':
self.queue = queue
self.isSim = True
self.compose = False # If simulator, we're not using the new compose
#print "IoTAS Queue at %s" % (self.queue,)
else:
if (self.compose == True):
self.pipename = '/run/compose.fifo'
print "Using compose 2nd generation compositor with PID %d" % self.pid
else:
self.pipename = "/run/pipelights.fifo"
self.address = address
try:
self.pipe = open(self.pipename,"wb")
except:
print "Couldn't open the pipe, there's gonna be trouble!"
ln = 0
else:
self.address = address
self.remote = True
for ln in range(self.numleds):
self.leds.append([0x00, 0x00, 0x00]) # Create and clear an array of RGB LED values
return
def __str__(self):
ret = 'name = %s\n' % self.name
ret += 'remote = %s\n' % self.remote
ret += 'address = %s\n' % self.address
ret += 'leds: [R, G, B]\n'
for i in range(self.numleds):
ret += " %d %s\n" % (i, self.leds[i])
return ret
def create_routes(self,theapp):
"""Create the bottle routes for the instance of this class
This is weird but apparently works due to the magic of Python
There is a URL schema for IoTAS. You should follow its guidelines."""
routebase = """/iotas/0.1/device/moorescloud.holiday/%s/""" % self.name
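# e.g. for a device named 'holiday1' this yields routes such as
# /iotas/0.1/device/moorescloud.holiday/holiday1/hostname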
@theapp.get(routebase + 'hostname')
def get_hostname():
"""Return the hostname as nicely formatted JSON"""
import socket
n = { "hostname": socket.gethostname() }
return json.dumps(n)
@theapp.put(routebase + 'hostname')
def set_hostname():
"""Sets the hostname for the device, given a nicely formatted JSON request
triggers a script in /home/holiday/util to do the work"""
d = request.body.read()
print "Received %s" % d
try:
dj = json.loads(d)
except:
print "Bad JSON data, aborting"
abort(400, "Bad JSON")
return
if 'hostname' in dj:
try:
c = subprocess.check_output(['/home/holiday/util/set_hostname.sh', dj['hostname']])
except subprocess.CalledProcessError:
abort(500, "Hostname change failed")
else:
abort(400, "No hostname provided")
return
return
@theapp.get(routebase + 'devmode')
def get_devmode():
""" Return a boolean indicating whether the Holiday is in developer mode or not"""
if self.isSim == True:
the_response = { "devmode": self.inDevMode }
else:
try:
c = subprocess.check_output(['/home/holiday/util/get_devmode.sh'])
the_response = { "devmode": True }
except subprocess.CalledProcessError:
the_response = { "devmode": False }
return json.dumps(the_response)
@theapp.put(routebase + 'devmode')
def set_devmode():
""" Sets developer mode to the state passed in the nicely formatted JSON """
d = request.body.read()
print "Received %s" % d
try:
dj = json.loads(d)
except:
print "Bad JSON data, aborting"
abort(400, "Bad JSON")
return
if 'devmode' in dj:
devbool = dj['devmode']
else:
print "No devmode found, aborting"
abort(400, "No devmode specified")
return
if self.isSim == True:
self.inDevMode = devbool
else:
try:
c = subprocess.check_output(['/home/holiday/util/set_devmode.sh', str(devbool)])
except subprocess.CalledProcessError:
abort(500, "Developer mode set failed")
return
return
@theapp.get(routebase + 'update')
def get_update_status():
"""Return True if there are updates to be done"""
try:
c = subprocess.check_output(['/home/holiday/updates/test_updates.sh'])
updates_ready = True
except subprocess.CalledProcessError:
updates_ready = False
n = { "update": updates_ready }
return json.dumps(n)
@theapp.put(routebase + 'update')
def do_update():
"""Runs script to install updates"""
try:
c = subprocess.check_output(['/home/holiday/updates/do_updates.sh'])
updates_done = True
except subprocess.CalledProcessError:
updates_done = False
n = { "update": updates_done }
return json.dumps(n)
@theapp.put(routebase + 'rainbow')
def do_rainbow():
"""Starts/stops the rainbow app"""
d = request.body.read()
print "Received %s" % d
try:
dj = json.loads(d)
except:
print "Bad JSON data, aborting"
abort(400, "Bad JSON")
return
if (dj['isStart'] == True):
print "starting rainbow app"
app_path = os.path.join(self.appbase, 'rainbow')
print 'app_path: %s' % app_path
try:
c = subprocess.call(['/home/holiday/scripts/start-app.sh', app_path], shell=False)
print "rainbow app started"
success = True
except subprocess.CalledProcessError:
print "Error starting process"
success = False
else:
print "stopping rainbow app"
try:
c = subprocess.call(['/home/holiday/scripts/stop-app.sh'], shell=True)
print "rainbow app stopped"
success = True
except subprocess.CalledProcessError:
print "Error stopping process"
success = False
return json.dumps({"success": success})
@theapp.put(routebase + 'runapp')
def do_runapp():
"""Starts/stops the named app"""
d = request.body.read()
print "Received %s" % d
try:
dj = json.loads(d)
except:
print "Bad JSON data, aborting"
abort(400, "Bad JSON")
return
# Makes sure we have everything we need here
if (('isStart' in dj) and ('appname' in dj)):
print "We have the parameters"
else:
print "Missing JSON parameters, aborting"
abort(400, "Missing JSON parameters")
return
if (dj['isStart'] == True):
print "starting app %s" % dj['appname']
app_path = os.path.join(self.appbase, dj['appname'])
print 'app_path: %s' % app_path
try:
c = subprocess.call(['/home/holiday/scripts/start-app.sh', app_path], shell=False)
print "%s app started" % dj['appname']
success = True
except subprocess.CalledProcessError:
print "Error starting process"
success = False
else:
print "stopping %s app" % dj['appname']
try:
c = subprocess.call(['/home/holiday/scripts/stop-app.sh'], shell=True)
print "%s app stopped" % dj['appname']
success = True
except subprocess.CalledProcessError:
print "Error stopping process"
success = False
return json.dumps({"success": success})
@theapp.get(routebase + 'version')
def get_version():
return json.dumps({ "version": __version__ })
@theapp.get(routebase + 'swift_version')
def get_swift_version():
return json.dumps({ "version": "1.0b3" })
@theapp.get(routebase)
def get_holidays():
return json.dumps(self.get_devices())
@theapp.put(routebase + 'setlights')
def do_setlights():
d = request.body.read()
print "Received %s" % d
try:
dj = json.loads(d)
print len(dj['lights'])
except:
print "Bad JSON data, aborting..."
return json.dumps({"value": False})
resp = self.setlights(dj)
return json.dumps(resp)
@theapp.put(routebase + 'setvalues')
def do_setvalues():
d = request.body.read()
#print "Received %s" % d
try:
dj = json.loads(d)
except:
print "Bad JSON data, aborting..."
return json.dumps({"value": False})
resp = self.do_setvalues(dj['values'])
return json.dumps(resp)
@theapp.get(routebase + 'led/<num>/value')
def read_led_value(num):
print "read_led_value %s" % num
value = self.get_led_value(int(num))
return """{"led": %s, "value": %s}""" % (num, value)
@theapp.put(routebase + 'led/<num>/value')
def set_led_value(num):
d = request.body.read()
print "Received %s" % d
try:
dj = json.loads(d)
except:
print "Bad JSON data, aborting..."
return json.dumps({"value": False})
if 'value' in dj:
print "there is a value"
triplet = dj['value']
else:
return json.dumps({"value": False})
print "set_led_value %s %s" % (int(num), triplet)
self.set_led_value(int(num), triplet)
return """{"led": %s, "value": %s}""" % (num, triplet)
@theapp.put(routebase + 'gradient')
def gradient():
d = request.body.read()
#print "Received %s" % d
try:
dj = json.loads(d)
except:
print "Bad JSON data, aborting..."
return json.dumps({"value": False})
if 'begin' in dj:
#print "there is a beginning"
begin = dj['begin']
else:
return json.dumps({"value": False})
if 'end' in dj:
#print "there is a ending"
end = dj['end']
else:
return json.dumps({"value": False})
if 'steps' in dj:
#print "and some steps"
steps = dj['steps']
else:
return json.dumps({"value": False})
print "gradient %s %s %s" % (begin, end, steps)
resp = self.gradient(begin, end, int(steps))
return json.dumps(resp)
def get_devices(self):
l = { "device_type": "Holiday", "number": 50, "version": 0.1 }
return [ l ]
def get_led_value(self, lednum):
if lednum < self.numleds:
return self.leds[lednum]
else:
raise IndexError("Illegal LED number")
def set_led_value(self, lednum, value):
if lednum < self.numleds:
self.leds[lednum][0] = value[0]
self.leds[lednum][1] = value[1]
self.leds[lednum][2] = value[2]
self.render()
#print self.leds
return self.leds[lednum]
else:
raise IndexError("Illegal LED number")
def get_light_values(self):
return { "lights": self.leds }
def set_light_values(self, value):
ln = 0
while (ln < self.numleds):
self.leds[ln][0] = value[0] # red channel
self.leds[ln][1] = value[1]
self.leds[ln][2] = value[2]
ln = ln + 1
self.render()
return { "lights": self.leds }
def setlights(self,jsondata):
global awt_colors
if 'lights' not in jsondata:
return { "value": False }
ln = 0
for bulb in jsondata['lights']:
if bulb in awt_colors:
#print "Got a match!"
colorval = awt_colors[bulb]
else:
#print bulb[1:]
colorval = int(bulb[1:], 16)
self.leds[ln][0] = (colorval & 0xFF0000) >> 16
self.leds[ln][1] = (colorval & 0xFF00) >> 8
self.leds[ln][2] = colorval & 0xFF
ln += 1
self.render()
return { "value": True }
def do_setvalues(self, values):
ln = 0
while (ln < self.numleds):
self.leds[ln][0] = values[ln][0] # red channel
self.leds[ln][1] = values[ln][1]
self.leds[ln][2] = values[ln][2]
ln = ln + 1
self.render()
return { "lights": self.leds }
def gradient(self, begin, end, steps):
"""Do it the new-fashioned way"""
steps = float(steps)
base = [0.0,0.0,0.0]
base[0] = begin[0]
base[1] = begin[1]
base[2] = begin[2]
incr = [0.0,0.0,0.0]
incr[0] = float((end[0]-begin[0]) / steps)
incr[1] = float((end[1]-begin[1]) / steps)
incr[2] = float((end[2]-begin[2]) / steps)
print "r-incr %f g-incr %f b-incr %f" % (incr[0],incr[1],incr[2])
s = 0.0
gr = [0,0,0]
while (s < steps):
gr[0] = int(base[0] + (incr[0] * s))
gr[1] = int(base[1] + (incr[1] * s))
gr[2] = int(base[2] + (incr[2] * s))
self.set_light_values(gr)
s = s + 1
time.sleep(.02)
return { "value": True }
def nrl(self, data):
"""Set the NRL team colours based on the passed value"""
team_num = int(data['team'])
print "team_num %d" % team_num
if (team_num < 1) or (team_num > 16):
return { 'value': False }
try:
resp = subprocess.call(['/home/mpesce/sport/nrl', str(team_num)])
except:
return { 'value': False }
return { 'value': True }
def afl(self, data):
"""Set the NRL team colours based on the passed value"""
team_num = int(data['team'])
if (team_num < 1) or (team_num > 18):
return { 'value': False }
try:
resp = subprocess.call(['/home/mpesce/sport/afl', str(team_num)])
except:
return { 'value': False }
return { 'value': True }
def old_render(self):
"""Render the LED array to the Light"""
"""This version is safe because it renders to a string in memory"""
if (self.remote == True):
hol_vals = []
for glz in self.leds:
hol_vals.append("#%02x%02x%02x" % (glz[0], glz[1], glz[2]))
hol_msg = { "lights": hol_vals }
hol_msg_str = json.dumps(hol_msg)
print hol_msg_str
urlstr = 'http://%s/device/light/setlights' % self.address
r = requests.put(urlstr, data=hol_msg_str)
else:
echo = ""
ln = 0
while (ln < self.numleds):
tripval = (self.leds[ln][0] * 65536) + (self.leds[ln][1] * 256) + self.leds[ln][2]
#echo = echo + "%6X" % tripval + "\\" + "\\" + "x0a" # magic pixie formatting eh?
echo = echo + "%06X\n" % tripval
ln = ln+1
#print echo
#os.system("""%s""" % echo)
self.pipe.write(echo)
self.pipe.flush()
#os.system("""%s | /srv/http/cgi-bin/setlights""" % echo)
return
def render(self):
"""Render the LED array to the Light"""
"""This version is safe because it renders to a string in memory"""
if (self.remote == True):
hol_vals = []
for glz in self.leds:
hol_vals.append("#%02x%02x%02x" % (glz[0], glz[1], glz[2]))
hol_msg = { "lights": hol_vals }
hol_msg_str = json.dumps(hol_msg)
print hol_msg_str
urlstr = 'http://%s/device/light/setlights' % self.address
r = requests.put(urlstr, data=hol_msg_str)
else:
if (self.compose == True):
"""Render the LED array to the Holiday
This is done by composing a text string in memory
Which is then written out to the compositor FIFO pipe in a single go,
So it should be reasonably fast."""
rend = []
rend.append("0x000010\n") # clear flag set for now
pid_str = "0x%06x\n" % self.pid
rend.append(pid_str)
#print pid_str
#compositor_str = compositor_str + pid_str # First two lines are placeholders for now, will be meaningful
ln = 0
while (ln < self.numleds):
tripval = (self.leds[ln][0] * 65536) + (self.leds[ln][1] * 256) + self.leds[ln][2]
rend.append("0x%06X\n" % tripval)
ln = ln+1
self.pipe.write(''.join(rend))
self.pipe.flush()
else:
echo = ""
ln = 0
slist = []
while (ln < self.numleds):
tripval = (self.leds[ln][0] * 65536) + (self.leds[ln][1] * 256) + self.leds[ln][2]
#echo = echo + "%6X" % tripval + "\\" + "\\" + "x0a" # magic pixie formatting eh?
#echo = echo + "%06X\n" % tripval
slist.append("%06X\n" % tripval)
ln = ln+1
#print echo
echo = ''.join(slist) # Meant to be very much faster
if self.isSim == True:
self.queue.put(echo, block=False)
else:
self.pipe.write(echo)
self.pipe.flush()
return
def on(self):
return self.set_light_values([255,255,255])
def off(self):
return self.set_light_values([0,0,0])
awt_colors = dict( black=0x808080, blue=0x8080FF, cyan=0x80FFFF, darkGray=0x818181, gray=0x848484,
green=0x80FF80, lightGray=0x8f8f8f, magenta=0xFF80FF, orange=0xFF9780, pink=0xFF90EF,
red=0xFF8080, white=0xFFFFFF, yellow=0xFFFF80, BLACK=0x808080, BLUE=0x8080FF, CYAN=0x80FFFF,
DARK_GRAY=0x818181, GRAY=0x848484,GREEN=0x80FF80, LIGHT_GRAY=0x8f8f8f, MAGENTA=0xFF80FF,
ORANGE=0xFF9780, PINK=0xFF90EF, RED=0xFF8080, WHITE=0xFFFFFF, YELLOW=0xFFFF80 )
# Do some unit tests when invoked from the command line
if __name__ == '__main__':
hol = Holiday(remote=True, address='lithia.local')
print hol
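# Client-side usage sketch (illustrative only; the hostname "holiday.local" and
# the device name "hol0" below are assumptions, not values from this file).
# It shows the JSON payloads the setlights and gradient routes above expect.
import json
import requests

base = "http://holiday.local/iotas/0.1/device/moorescloud.holiday/hol0/"

# setlights takes a list of colour strings: "#RRGGBB" values or awt_colors names
requests.put(base + "setlights",
             data=json.dumps({"lights": ["#ff0000", "green", "#0000ff"]}))

# gradient takes begin/end RGB triplets and a step count
requests.put(base + "gradient",
             data=json.dumps({"begin": [0, 0, 0], "end": [255, 0, 0], "steps": 50}))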
| avg_line_length: 28.73064 | max_line_length: 110 | alphanum_fraction: 0.629966 |
| hexsha: 6a2a1735247eedda01f0b7c2570fa2e4655934e6 | size: 40 | ext: py | lang: Python |
| path: 00-basics/09-string_formatters_01.py | repo: ralexrivero/python_fundation | head_hexsha: 34a855db7380d3d91db6a8f02d97f287d038ef5f | licenses: ["Apache-2.0"] |
| max_stars_count: 1 (2021-09-19T04:09:48.000Z to 2021-09-19T04:09:48.000Z) | max_issues_count: null | max_forks_count: null |
print("Sammy has {} ballons.".format(5))
| avg_line_length: 40 | max_line_length: 40 | alphanum_fraction: 0.675 |
| hexsha: a085b6557592cff670c7bc6251f09356ad82a7f0 | size: 71 | ext: py | lang: Python |
| path: pytgraph/types/traits.py | repo: intv0id/pytgraph | head_hexsha: 0e42357e28fc32c478dfad610f331ea0b3d5d735 | licenses: ["BSD-3-Clause"] |
| max_stars_count: 1 (2019-06-21T15:35:11.000Z to 2019-06-21T15:35:11.000Z) | max_issues_count: 1 (2019-12-21T14:23:43.000Z to 2019-12-21T14:23:43.000Z) | max_forks_count: null |
from traitlets import TraitType
class GraphData(TraitType):
pass
| avg_line_length: 11.833333 | max_line_length: 31 | alphanum_fraction: 0.774648 |
| hexsha: 793c86f73a4d55821ca6cb2b6b8686ce149599d7 | size: 8,853 | ext: py | lang: Python |
| path: aioouimeaux/__main__.py | repo: frawau/aioouimeaux | head_hexsha: ea473ded95e41e350793b0e289944a359049c501 | licenses: ["BSD-3-Clause"] |
| max_stars_count: 2 (2019-01-26T02:44:14.000Z to 2019-08-06T00:40:56.000Z) | max_issues_count: 1 (2019-05-23T22:35:27.000Z to 2019-05-25T20:23:50.000Z) | max_forks_count: null |
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
#
# This application is an example on how to use aioouimeaux
#
# Copyright (c) 2016 François Wautier
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies
# or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
# IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE
import sys
import asyncio as aio
from functools import partial
import argparse
import socket
from aioouimeaux.wemo import WeMo
from collections import OrderedDict
wemodoi = None #Device of interest
listoffunc=OrderedDict()
listoffunc["Get Home Id"] = (lambda dev: dev.basicevent.GetHomeId(),"HomeId")
listoffunc["Get MAC Address"] = (lambda dev: dev.basicevent.GetMacAddr(),"MacAddr")
listoffunc["Get Device Id"] = (lambda dev: dev.basicevent.GetDeviceId(),"")
listoffunc["Get Serial Number"] = (lambda dev: dev.serialnumber,"")
listoffunc["Get Power Consumption"] = (lambda dev: dev.insight_params,"")
async def showinfo(future,info,dev,key=""):
try:
await aio.wait_for(future, timeout=5)
resu = future.result()
if key:
print("\n{}: {} is {}".format(dev.name, info,resu[key]))
else:
print("\n{}: {} is {}".format(dev.name, info,resu))
except Exception as e:
print("\nException for {}: {} failed with {}".format(dev.name, info,e))
unregister_device(dev)
async def await_result(future,dev):
try:
await aio.wait_for(future, timeout=5)
resu = future.result()
#TODO Could log on debug
except Exception as e:
print("\nException for {}: On/Off failed with {e}".format(dev.name))
unregister_device(dev)
def readin():
"""Reading from stdin and displaying menu"""
global MyWeMo
global wemodoi
selection = sys.stdin.readline().strip("\n")
devices = MyWeMo.list_devices()
devices.sort()
lov=[ x for x in selection.split(" ") if x != ""]
if lov:
if wemodoi:
#try:
if True:
selection = int(lov[0])
if selection < 0 :
print("Invalid selection.")
else:
if wemodoi.device_type == "Switch":
if selection == 1:
if len(lov) >1:
if lov[1].lower() in ["1","on","true"]:
future = wemodoi.on()
else:
future = wemodoi.off()
xx = aio.ensure_future(await_result(future,wemodoi))
wemodoi=None
else:
print("Error: For power you must indicate on or off\n")
selection -= 1
if selection > (len(listoffunc)+2):
print("Invalid selection.")
elif selection == (len(listoffunc)+1):
print("Function supported by {}".format(wemodoi.name))
wemodoi.explain(prefix="\t")
wemodoi = None
elif selection == (len(listoffunc)+2):
if len(lov) >1:
lok = [ x.strip() for x in lov[1].strip().split(".")]
fcnt = wemodoi
for key in lok:
fcnt = getattr(fcnt,key,None)
if fcnt is None:
print("Unknown function {}".format(lov[1].strip()))
break
if fcnt:
if callable(fcnt):
param={}
if len(lov)>2:
param={}
key=None
for x in range(2,len(lov)):
if key:
param[key]=lov[x]
key=None
else:
key=lov[x]
if key:
param[key]=""
if param:
future = fcnt(**param)
else:
future = fcnt()
xx = aio.ensure_future(showinfo(future,".".join(lok),wemodoi,""))
else:
print(getattr(wemodoi,fcnt,None))
wemodoi = None
else:
print("We need a function to execute")
elif selection>0:
what = [x for x in listoffunc.keys()][selection-1]
fcnt,key = listoffunc[what]
what = what.replace("Get","").strip()
try:
future = fcnt(wemodoi)
if aio.isfuture(future):
xx = aio.ensure_future(showinfo(future,what,wemodoi,key))
except:
print("Operation not supported by device.")
else:
print("\n{}: {} is {}".format(wemodoi.name, what, future))
wemodoi = None
else:
wemodoi = None
#except:
#print (f"\nError: Selection must be a number between 0 and {len(listoffunc)+3}.\n")
else:
try:
if int(lov[0]) > 0:
devices = MyWeMo.list_devices()
devices.sort()
if int(lov[0]) <=len(devices):
wemodoi=MyWeMo.devices[devices[int(lov[0])-1]]
else:
print("\nError: Not a valid selection.\n")
except:
print ("\nError: Selection must be a number.\n")
if wemodoi:
print("Select Function for {}:".format(wemodoi.name))
selection = 1
if wemodoi.device_type == "Switch":
print("\t[{}]\tPower (0 or 1)".format(selection))
selection += 1
for x in listoffunc:
print("\t[{}]\t{}".format(selection,x))
selection += 1
print("\t[{}]\tExplain".format(selection))
print("\t[{}]\tFunction X (e.g. basicevent.GetHomeInfo see 'explain')".format(selection+1))
print("")
print("\t[0]\tBack to device selection")
else:
idx=1
print("Select Device:")
devices = MyWeMo.list_devices()
devices.sort()
for x in devices:
print("\t[{}]\t{}".format(idx,x))
idx+=1
print("")
print("Your choice: ", end='',flush=True)
def report_status(dev):
print("{} {} status is now {}".format(dev.device_type, dev.name, dev.get_state() and 'On' or 'Off'))
def register_device(dev):
dev.register_callback("statechange", report_status)
#dev.explain()
def unregister_device(dev):
global MyWeMo
print("Device {} with {}".format(dev, dev.basicevent.eventSubURL))
MyWeMo.device_gone(dev)
loop = aio.get_event_loop()
#loop.set_debug(True)
MyWeMo = WeMo(callback=register_device)
MyWeMo.start()
try:
loop.add_reader(sys.stdin,readin)
print("Hit \"Enter\" to start")
print("Use Ctrl-C to quit")
loop.run_forever()
except KeyboardInterrupt:
print("\n", "Exiting at user's request")
finally:
# Close the reader
loop.remove_reader(sys.stdin)
MyWeMo.stop()
loop.run_until_complete(aio.sleep(1))
loop.close()
| avg_line_length: 40.797235 | max_line_length: 104 | alphanum_fraction: 0.495312 |
| hexsha: e24dc1d51069f46e528280ff2eb58115dda49bfc | size: 178 | ext: py | lang: Python |
| path: kungfucms/themes/__init__.py | repo: kungfusoft/kungfucms | head_hexsha: 03a4c322f3d074dba47fd512c638556059ac6565 | licenses: ["MIT"] |
| max_stars_count: null | max_issues_count: null | max_forks_count: null |
# PROJECT : kungfucms
# TIME : 19-2-14 2:12 PM
# AUTHOR : Younger Shen
# EMAIL : younger.x.shen@gmail.com
# CELL : 13811754531
# WECHAT : 13811754531
# WEB : https://punkcoder.cn
| avg_line_length: 22.25 | max_line_length: 34 | alphanum_fraction: 0.691011 |
| hexsha: c23f42eb6cfc2377825bb84dd15c19b33c9c4027 | size: 417 | ext: py | lang: Python |
| path: backend/summer_thunder_30013/wsgi.py | repo: crowdbotics-apps/summer-thunder-30013 | head_hexsha: 998a1d4ae8f40f8bd09994e194424698f0a2f905 | licenses: ["FTL", "AML", "RSA-MD"] |
| max_stars_count: null | max_issues_count: 27 (2021-08-21T22:42:39.000Z to 2021-10-04T15:41:12.000Z) | max_forks_count: null |
"""
WSGI config for summer_thunder_30013 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'summer_thunder_30013.settings')
application = get_wsgi_application()
| avg_line_length: 24.529412 | max_line_length: 80 | alphanum_fraction: 0.798561 |
| hexsha: ba79c942b34df054f0d551e5e500a386f9366159 | size: 9,290 | ext: py | lang: Python |
| path: elegantrl/train/config.py | licenses: ["Apache-2.0"] |
| max_stars_repo: shengrenhou/ElegantRL (head_hexsha a99ad3336410eb4ae632211d4b8b8c35acfeef6d) | max_stars_count: 752 (2021-02-10T09:23:00.000Z to 2021-09-02T18:04:46.000Z) |
| max_issues_repo: ashishsengupta/ElegantRL (head_hexsha a29c58e39ea9fd2936f2c3434de4a7a8cc201739) | max_issues_count: 34 (2021-02-10T14:18:25.000Z to 2021-08-25T12:10:48.000Z) |
| max_forks_repo: ashishsengupta/ElegantRL (head_hexsha a29c58e39ea9fd2936f2c3434de4a7a8cc201739) | max_forks_count: 141 (2021-02-15T21:12:01.000Z to 2021-09-02T09:08:45.000Z) |
import os
import torch
import numpy as np
class Arguments: # [ElegantRL.2021.10.21]
"""
Configuration map.
:param env[object]: the environment object in ElegantRL.
:param agent[object]: the agent object in ElegantRL.
**Attributes for environment setup**
Attributes
----------------
env : object
environment object in ElegantRL.
env_num : int
number of sub-environments. For VecEnv, env_num > 1.
max_step : int
max step of an episode.
state_dim : int
state dimension of the environment.
action_dim : int
action dimension of the environment.
if_discrete : boolean
discrete or continuous action space.
target_return : float
target average episodic return.
**Attributes for model training**
Attributes
----------------
agent : object
agent object in ElegantRL.
if_off_policy : boolean
off-policy or on-policy for the DRL algorithm.
net_dim : int
neural network width.
max_memo : int
capacity of replay buffer.
batch_size : int
number of transitions sampled in one iteration.
target_step : int
repeatedly update network to keep critic's loss small.
repeat_times : int
collect target_step, then update network.
break_step : int
break training after total_step > break_step.
if_allow_break : boolean
allow break training when reach goal (early termination).
if_per_or_gae : boolean
use Prioritized Experience Replay (PER) or not for off-policy algorithms.
use Generalized Advantage Estimation or not for on-policy algorithms.
gamma : float
discount factor of future rewards.
reward_scale : int
an approximate target reward.
learning_rate : float
the learning rate.
soft_update_tau : float
soft update parameter for target networks.
**Attributes for model evaluation**
Attributes
----------------
eval_env : object
environment object for model evaluation.
eval_gap : int
time gap for periodical evaluation (in seconds).
eval_times1 : int
number of evaluation episodes used for the first, quick estimate of the episode return.
eval_times2 : int
number of evaluation episodes used for the second, more thorough estimate of the episode return.
eval_gpu_id : int or None
the GPU id for the evaluation environment.
-1 means use cpu, >=0 means use GPU, None means set as learner_gpus[0].
if_overwrite : boolean
save policy networks with different episodic return separately or overwrite.
**Attributes for resource allocation**
Attributes
----------------
worker_num : int
rollout workers number per GPU (adjust it to get high GPU usage).
thread_num : int
cpu_num for evaluate model.
random_seed : int
initialize random seed in ``init_before_training``.
learner_gpus : list
GPU ids for learner.
workers_gpus : list
GPU ids for worker.
ensemble_gpus : list
GPU ids for population-based training (PBT).
ensemble_gap : list
time gap for leaderboard update in tournament-based ensemble training.
cwd : string
directory path to save the model.
if_remove : boolean
remove the cwd folder? (True, False, None:ask me).
"""
def __init__(self, env, agent):
self.env = env # the environment for training
self.env_num = getattr(env, 'env_num', 1) # env_num = 1. In vector env, env_num > 1.
self.max_step = getattr(env, 'max_step', None) # the max step of an episode
self.state_dim = getattr(env, 'state_dim', None) # vector dimension (feature number) of state
self.action_dim = getattr(env, 'action_dim', None) # vector dimension (feature number) of action
self.if_discrete = getattr(env, 'if_discrete', None) # discrete or continuous action space
self.target_return = getattr(env, 'target_return', None) # target average episode return
self.agent = agent # Deep Reinforcement Learning algorithm
self.if_off_policy = agent.if_off_policy # agent is on-policy or off-policy
if self.if_off_policy: # off-policy
self.net_dim = 2 ** 8 # the network width
self.max_memo = 2 ** 21 # capacity of replay buffer
self.batch_size = self.net_dim # num of transitions sampled from replay buffer.
self.target_step = 2 ** 10 # repeatedly update network to keep critic's loss small
self.repeat_times = 2 ** 0 # collect target_step, then update network
self.if_per_or_gae = False # use PER (Prioritized Experience Replay) for sparse reward
else: # on-policy
self.net_dim = 2 ** 9 # the network width
self.max_memo = 2 ** 12 # capacity of replay buffer
self.batch_size = self.net_dim * 2 # num of transitions sampled from replay buffer.
self.target_step = self.max_memo # repeatedly update network to keep critic's loss small
self.repeat_times = 2 ** 3 # collect target_step, then update network
self.if_per_or_gae = False # use PER: GAE (Generalized Advantage Estimation) for sparse reward
self.gamma = 0.99 # discount factor of future rewards
self.reward_scale = 2 ** 0 # an approximate target reward, usually close to 256
self.learning_rate = 2 ** -15 # 2 ** -14 ~= 3e-5
self.soft_update_tau = 2 ** -8 # 2 ** -8 ~= 5e-3
self.worker_num = 2 # number of rollout workers per GPU (adjust it to get high GPU usage)
self.thread_num = 8 # cpu_num for evaluate model, torch.set_num_threads(self.num_threads)
self.random_seed = 0 # initialize random seed in self.init_before_training()
self.learner_gpus = (0,) # for example: os.environ['CUDA_VISIBLE_DEVICES'] = '0, 2,'
self.workers_gpus = self.learner_gpus # for GPU_VectorEnv (such as isaac gym)
self.ensemble_gpus = None # for example: (learner_gpus0, ...)
self.ensemble_gap = 2 ** 8
self.cwd = None # the directory path to save the model
self.if_remove = True # remove the cwd folder? (True, False, None:ask me)
self.break_step = +np.inf # break training after 'total_step > break_step'
self.if_allow_break = True # allow break training when reach goal (early termination)
self.eval_env = None # the environment for evaluating. None means set automatically.
self.eval_gap = 2 ** 8 # evaluate the agent per eval_gap seconds
self.eval_times1 = 2 ** 2 # number of evaluation episodes for the first, quick estimate
self.eval_times2 = 2 ** 4 # number of evaluation episodes for the second, thorough estimate
self.eval_gpu_id = None # -1 means use cpu, >=0 means use GPU, None means set as learner_gpus[0]
self.if_overwrite = True # Save policy networks with different episode return or overwrite
def init_before_training(self):
"""
Check parameters before training.
"""
np.random.seed(self.random_seed)
torch.manual_seed(self.random_seed)
torch.set_num_threads(self.thread_num)
torch.set_default_dtype(torch.float32)
'''env'''
assert isinstance(self.env_num, int)
assert isinstance(self.max_step, int)
assert isinstance(self.state_dim, int) or isinstance(self.state_dim, tuple)
assert isinstance(self.action_dim, int) or isinstance(self.action_dim, tuple)
assert isinstance(self.if_discrete, bool)
assert isinstance(self.target_return, int) or isinstance(self.target_return, float)
'''agent'''
assert hasattr(self.agent, 'init')
assert hasattr(self.agent, 'update_net')
assert hasattr(self.agent, 'explore_env')
assert hasattr(self.agent, 'select_actions')
'''auto set'''
if self.cwd is None:
agent_name = self.agent.__class__.__name__
env_name = getattr(self.env, 'env_name', self.env)
self.cwd = f'./{agent_name}_{env_name}_{self.learner_gpus}'
if self.eval_gpu_id is None:
self.eval_gpu_id = self.learner_gpus[0]
'''remove history'''
if self.if_remove is None:
self.if_remove = bool(input(f"| PRESS 'y' to REMOVE: {self.cwd}? ") == 'y')
elif self.if_remove:
import shutil
shutil.rmtree(self.cwd, ignore_errors=True)
print(f"| Remove cwd: {self.cwd}")
else:
print(f"| Keep cwd: {self.cwd}")
os.makedirs(self.cwd, exist_ok=True)
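# Illustrative usage sketch (not part of the original file): minimal stand-in
# env and agent objects that satisfy the attribute checks in
# init_before_training(); real usage passes an ElegantRL environment and agent.
class DummyEnv:
    env_name = 'DummyEnv-v0'
    env_num = 1
    max_step = 1000
    state_dim = 4
    action_dim = 2
    if_discrete = True
    target_return = 200.0

class DummyAgent:
    if_off_policy = True
    def init(self, *args, **kwargs): pass
    def update_net(self, *args, **kwargs): pass
    def explore_env(self, *args, **kwargs): pass
    def select_actions(self, *args, **kwargs): pass

if __name__ == '__main__':
    args = Arguments(env=DummyEnv(), agent=DummyAgent())
    args.if_remove = False  # keep an existing cwd instead of deleting it
    args.init_before_training()  # seeds the RNGs and creates args.cwd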
| avg_line_length: 45.990099 | max_line_length: 107 | alphanum_fraction: 0.602476 |
| hexsha: b127f3604a1ebc218255f1aed4f1b7449c2d2fda | size: 708 | ext: py | lang: Python |
| path: ip_addresses/admin.py | repo: rytis/IP-address-management-tool | head_hexsha: 35531adf85eef369c3ce23ff1e9f9e9fc4e1fb38 | licenses: ["Apache-2.0"] |
| max_stars_count: 7 (2016-04-26T00:51:49.000Z to 2022-01-18T15:57:24.000Z) | max_issues_count: 2 (2017-12-18T05:50:07.000Z to 2019-07-10T09:06:23.000Z) | max_forks_count: 7 (2015-09-15T11:19:15.000Z to 2022-03-17T00:43:29.000Z) |
from www_example_com.ip_addresses.models import *
from django.contrib import admin
class NetworkAddressAdmin(admin.ModelAdmin):
pass
class DHCPNetworkAdmin(admin.ModelAdmin):
pass
class DNSServerAdmin(admin.ModelAdmin):
pass
class DomainNameAdmin(admin.ModelAdmin):
pass
class DHCPAddressPoolAdmin(admin.ModelAdmin):
pass
class ClassRuleAdmin(admin.ModelAdmin):
pass
admin.site.register(NetworkAddress, NetworkAddressAdmin)
admin.site.register(DHCPNetwork, DHCPNetworkAdmin)
admin.site.register(DHCPAddressPool, DHCPAddressPoolAdmin)
admin.site.register(DNSServer, DNSServerAdmin)
admin.site.register(DomainName, DomainNameAdmin)
admin.site.register(ClassRule, ClassRuleAdmin)
| avg_line_length: 25.285714 | max_line_length: 58 | alphanum_fraction: 0.820621 |
| hexsha: d4ae3b6549cb3e6e43a5f986ec76d6e1d4e09704 | size: 2,358 | ext: py | lang: Python |
| path: hummingbird/ml/operator_converters/__init__.py | repo: kernc/hummingbird | head_hexsha: 8c9d5b1f19054d521b22ad7fcffa8ee10e405ac3 | licenses: ["MIT"] |
| max_stars_count: null | max_issues_count: null | max_forks_count: null |
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
All operators converters are stored under this package.
"""
# Register constants used within Hummingbird converters.
from . import constants as converter_constants
from .. import supported as hummingbird_constants
from .._utils import _Constants
# Add constants in scope.
constants = _Constants(converter_constants, hummingbird_constants)
# To register a converter for scikit-learn API operators, import associated modules here.
from .onnx import onnx_operator # noqa: E402
from .onnx import array_feature_extractor as onnx_afe # noqa: E402, F811
from .onnx import linear as onnx_linear # noqa: E402, F811
from .onnx import normalizer as onnx_normalizer # noqa: E402, F811
from .onnx import one_hot_encoder as onnx_ohe # noqa: E402, F811
from .onnx import scaler as onnx_scaler # noqa: E402, F811
from .onnx import tree_ensemble # noqa: E402
from .sklearn import array_feature_extractor as sklearn_afe # noqa: E402
from .sklearn import decision_tree # noqa: E402
from .sklearn import gbdt # noqa: E402
from .sklearn import iforest # noqa: E402
from .sklearn import linear as sklearn_linear # noqa: E402
from .sklearn import normalizer as sklearn_normalizer # noqa: E402
from .sklearn import one_hot_encoder as sklearn_ohe # noqa: E402
from .sklearn import scaler as sklearn_scaler # noqa: E402
from .sklearn import sv # noqa: E402
from . import lightgbm # noqa: E402
from . import xgb # noqa: E402
__pdoc__ = {}
__pdoc__["hummingbird.operator_converters._array_feature_extractor_implementations"] = True
__pdoc__["hummingbird.operator_converters._gbdt_commons"] = True
__pdoc__["hummingbird.operator_converters._linear_implementations"] = True
__pdoc__["hummingbird.operator_converters._normalizer_implementations"] = True
__pdoc__["hummingbird.operator_converters._one_hot_encoder_implementations"] = True
__pdoc__["hummingbird.operator_converters._scaler_implementations"] = True
__pdoc__["hummingbird.operator_converters._tree_commons"] = True
__pdoc__["hummingbird.operator_converters._tree_implementations"] = True
| avg_line_length: 48.122449 | max_line_length: 91 | alphanum_fraction: 0.751484 |
| hexsha: 4c8f4350dfa188275a813fac7a94391a7bfd5e1f | size: 6,664 | ext: py | lang: Python |
| path: code/switch-watcher/src/switch-watch.py | repo: CC-Digital-Innovation/devops-workshop | head_hexsha: 1498d4382b2cff408856bd049c2b2cdf37cac4e7 | licenses: ["MIT"] |
| max_stars_count: null | max_issues_count: null | max_forks_count: 1 (2021-04-09T20:53:33.000Z to 2021-04-09T20:53:33.000Z) |
# libs/modules
import configparser
import difflib
import time
import smtplib
import yaml
from datetime import datetime
from configparser import ExtendedInterpolation
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from netmiko import ConnectHandler
# owned
__author__ = 'Rich Bocchinfuso'
__copyright__ = 'Copyright 2021, Sample switch config diff reporter for DevOps Workshop'
__credits__ = ['Rich Bocchinfuso']
__license__ = 'MIT'
__version__ = '0.1.0'
__maintainer__ = 'Rich Bocchinfuso'
__email__ = 'rbocchinfuso@gmail.com'
__status__ = 'Dev'
# read config paramaters from config.ini file using configparser
# prepare the config file reference
CONFIG = configparser.ConfigParser(interpolation=ExtendedInterpolation())
CONFIG.read('./config.ini')
# prepare SMTP-related variables from the config file
SMTP_SERVER = CONFIG['SMTP Info']['server']
SMTP_PORT = CONFIG['SMTP Info']['port']
SMTP_USERNAME = CONFIG['SMTP Info']['username']
SMTP_PASSWORD = CONFIG['SMTP Info']['password']
# prepare mailer variable from the config file
FROM = CONFIG['Mailer Info']['from']
TO = CONFIG['Mailer Info']['to']
CC = CONFIG['Mailer Info']['cc']
BCC = CONFIG['Mailer Info']['bcc']
ALL = TO + ',' + CC + ',' + BCC
# prepare message content variables from the config file
SUBJECT = CONFIG['Message Info']['subject']
# defining the command to send to each device
command = 'show run'
class read_from_files(object):
# defined class and the init function contains initialization for information of virtual switched from successful ssh connection
def __init__(self, name, device_type, ip_address, port, username, password):
self.name = name
self.device_type = device_type
self.ip = ip_address
self.port = port
self.username = username
self.password = password
# write function: read the current configuration of the network device and compare it against the baseline (the daily yesterday/today comparison is kept below, commented out)
def write_fromfile(self):
# establishing session to connect to device using SSH
session = ConnectHandler(device_type=self.device_type, ip=self.ip, port=self.port, username=self.username,
password=self.password)
# entering the session
enable = session.enable()
# sending commmand and storing output
output = session.send_command(command)
# ### daily comparison code
# #defining the file from yesterday, for comparison
# old_configfile = '/code/configfiles/' + self.ip + '_' + self.port + '_' + (
# datetime.date.today() - datetime.timedelta(days=1)).isoformat()
# #writing the command to a file for today
# with open('/code/configfiles/' + self.ip + '_' + self.port + '_' + datetime.date.today().isoformat(), 'w') as new_configfile:
# new_configfile.write(output + '\n')
# #extracting differences between yesterday's and todays file in HTML format
# with open(old_configfile, 'r') as old_file, open(
# '/code/configfiles/' + self.ip + '_' + self.port + '_' + datetime.date.today().isoformat(),
# 'r') as new_file:
# compare = difflib.HtmlDiff().make_file(fromlines=old_file.readlines(), tolines=new_file.readlines(),
# fromdesc=(datetime.date.today() - datetime.timedelta(
# days=1)).isoformat(),
# todesc=datetime.date.today().isoformat())
# #sending differences to mail function for forwarding as email
# # read_from_files.toscreen(compare)
# read_from_files.mail(compare)
# basline comparison code
# defining baseline, for comparison
baseline_configfile = '/code/configfiles/baseline.txt'
# writing current config
with open('/code/configfiles/' + self.ip + '_' + self.port, 'w') as current_configfile:
current_configfile.write(output + '\n')
# extracting differences between basline and current config file in HTML format
with open(baseline_configfile, 'r') as baseline_file, open(
'/code/configfiles/' + self.ip + '_' + self.port,
'r') as current_file:
compare = difflib.HtmlDiff().make_file(fromlines=baseline_file.readlines(), tolines=current_file.readlines(),
fromdesc=(
'Baseline Config'),
todesc=datetime.now().isoformat())
# sending differences to mail function for forwarding as email
# read_from_files.toscreen(compare)
read_from_files.mail(self.name, compare)
# defining function for sending comparison report to screen
# def toscreen(compare):
# print (compare)
# defining function for sending comparison report via email
def mail(name,compare):
msg = MIMEMultipart()
msg['From'] = FROM
msg['To'] = TO
msg['Subject'] = name + ': ' + SUBJECT
msg.attach(MIMEText(compare, 'html'))
try:
server = smtplib.SMTP(SMTP_SERVER, SMTP_PORT)
server.starttls()
server.login(SMTP_USERNAME, SMTP_PASSWORD)
server.sendmail(FROM, TO, msg.as_string())
server.quit()
print("email report sent successfully")
except Exception as e:
print(e)
print('something went wrong...')
# read the connection information for each virtual switch (device_type, ip, username and password)
if __name__ == "__main__":
while(True):
# read switch config details from switches.yml using PyYAML
f = open('switches.yml')
switch_yaml = yaml.safe_load(f)
f.close()
for switch_id in switch_yaml['switches']:
name = switch_yaml['switches'][switch_id]['name']
type = switch_yaml['switches'][switch_id]['type']
address = switch_yaml['switches'][switch_id]['address']
port = switch_yaml['switches'][switch_id]['port']
username = switch_yaml['switches'][switch_id]['username']
password = switch_yaml['switches'][switch_id]['password']
switch = read_from_files(
str(name), str(type), str(address), str(port), str(username), str(password))
# sending information of each virtual device to write function for reading and comparison
read_from_files.write_fromfile(switch)
time.sleep(60)
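# Illustrative only: the parsed structure that yaml.safe_load() must return from
# switches.yml for the loop above to work. All values are placeholders, not real
# hosts or credentials; config.ini likewise needs the 'SMTP Info', 'Mailer Info'
# and 'Message Info' sections read at the top of this script.
example_switches = {
    'switches': {
        1: {
            'name': 'lab-switch-01',
            'type': 'cisco_ios',
            'address': '192.0.2.10',
            'port': 22,
            'username': 'admin',
            'password': 'changeme',
        },
    },
}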
| avg_line_length: 42.993548 | max_line_length: 149 | alphanum_fraction: 0.634604 |
| hexsha: 0994dd14991120cebab69729cbe93f34a64330e2 | size: 4,233 | ext: py | lang: Python |
| path: samples/lab4_5_samples/AgI_sample.py | repo: bkoz37/3320labs | head_hexsha: 8314237587548f06bf6816abf7c9103e06ea4e58 | licenses: ["MIT"] |
| max_stars_count: 7 (2017-02-22T17:17:25.000Z to 2022-03-28T03:38:46.000Z) | max_issues_count: null | max_forks_count: 5 (2018-09-18T14:15:14.000Z to 2022-01-25T09:19:11.000Z) |
import numpy, os
import matplotlib.pyplot as plt
from labutil.plugins.lammps import lammps_run, parse_lammps_rdf, parse_lammps_thermo, get_rdf
from labutil.objects import File, Struc, Dir, ase2struc, ClassicalPotential
from ase.spacegroup import crystal
from ase.build import make_supercell
from ase import Atoms
intemplate = """
# ---------- Initialize simulation ---------------------
units metal
atom_style atomic
dimension 3
boundary p p p
newton off
read_data $DATAINPUT
pair_style mff
pair_coeff * * /home/bond/Work/Lab4/AgI_FF.txt 47 53 yes yes
velocity all create $TEMPERATURE 126342 dist gaussian rot yes mom yes
group silvers type 1
group iodines type 2
# ---------- Describe computed properties------------------
compute msdAg silvers msd com yes
compute msdI iodines msd com yes
compute rdfAg all rdf 1000 1 1
compute rdfI all rdf 1000 2 2
compute rdfAgI all rdf 1000 1 2
variable rdfAgFile string "$RDFFILE.Ag"
variable rdfIFile string "$RDFFILE.I"
variable rdfAgIFile string "$RDFFILE.AgI"
thermo_style custom step temp etotal press density c_msdAg[4] c_msdI[4]
thermo $TOUTPUT
# record rdf
fix 1 all ave/time 1 $RDFFRAME $RDFFRAME c_rdfAg[*] file ${rdfAgFile} mode vector
fix 2 all ave/time 1 $RDFFRAME $RDFFRAME c_rdfI[*] file ${rdfIFile} mode vector
fix 3 all ave/time 1 $RDFFRAME $RDFFRAME c_rdfAgI[*] file ${rdfAgIFile} mode vector
# ---------- Specify ensemble ---------------------
fix 4 all npt temp $TEMPERATURE $TEMPERATURE $TDAMP tri 0.0 0.0 1.0
# --------- Run -------------
timestep $TIMESTEP
run $NSTEPS
unfix 4
"""
def make_struc(size):
"""
Creates the crystal structure using ASE.
:param size: supercell multiplier
:return: structure object converted from ase
"""
alat = 5.1
lattice = alat * numpy.identity(3)
symbols = ['Ag', 'I', 'Ag', 'I']
sc_positions = [[1/2, 0, 1/4], [0, 0, 0], [1, 1/2, 3/4], [1/2, 1/2, 1/2]]
unitcell = Atoms(symbols=symbols, scaled_positions=sc_positions, cell=lattice)
multiplier = numpy.identity(3) * size
supercell = make_supercell(unitcell, multiplier)
structure = Struc(ase2struc(supercell))
return structure
def compute_AgI_dynamics(size, timestep, nsteps, temperature, ncpu):
"""
Make an input template and select potential and structure, and input parameters.
Return a pair of output file and RDF file written to the runpath directory.
"""
potential = ClassicalPotential(ptype='eam', element='Al', name='Al_zhou.eam.alloy')
runpath = Dir(path=os.path.join(os.environ['WORKDIR'], "Lab4/Problem2", "temp_" + str(temperature)))
struc = make_struc(size=size)
inparam = {
'TEMPERATURE': temperature,
'NSTEPS': nsteps,
'TIMESTEP': timestep,
'TOUTPUT': 100, # how often to write thermo output
'TDAMP': 50 * timestep, # thermostat damping time scale
'RDFFRAME': int(nsteps / 4), # frames for radial distribution function
}
outfile = lammps_run(struc=struc, runpath=runpath, potential=potential,
intemplate=intemplate, inparam=inparam, ncpu=ncpu, triclinic=True)
output = parse_lammps_thermo(outfile=outfile)
rdfAgFile = File(path=os.path.join(runpath.path, 'lammps.rdf.Ag'))
rdfIFile = File(path=os.path.join(runpath.path, 'lammps.rdf.I'))
rdfAgIFile = File(path=os.path.join(runpath.path, 'lammps.rdf.AgI'))
rdfsAg = parse_lammps_rdf(rdffile=rdfAgFile)
rdfsI = parse_lammps_rdf(rdffile=rdfIFile)
rdfsAgI = parse_lammps_rdf(rdffile=rdfAgIFile)
return output, rdfsAg, rdfsI, rdfsAgI
def md_run():
output, rdfsAg, rdfsI, rdfsAgI = compute_AgI_dynamics(size=1, timestep=0.001, nsteps=1000, temperature=300, ncpu=1)
[simtime, temp, etotal, press, dens, msdAg, msdI] = output
## ------- plot output properties
plt.plot(simtime, temp)
plt.show()
# ----- plot radial distribution functions
for rdf in rdfsAgI:
plt.plot(rdf[0], rdf[1])
plt.show()
if __name__ == '__main__':
# put here the function that you actually want to run
md_run()
| avg_line_length: 35.571429 | max_line_length: 119 | alphanum_fraction: 0.660524 |
| hexsha: a14401c642d0cde3d760cfcf88f05fccc59c9a6b | size: 456 | ext: py | lang: Python |
| path: L1TriggerConfig/GMTConfigProducers/python/L1MuGMTParametersOnlineProducer_cfi.py | repo: ckamtsikis/cmssw | head_hexsha: ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | licenses: ["Apache-2.0"] |
| max_stars_count: 852 (2015-01-11T21:03:51.000Z to 2022-03-25T21:14:00.000Z) | max_issues_count: 30,371 (2015-01-02T00:14:40.000Z to 2022-03-31T23:26:05.000Z) | max_forks_count: 3,240 (2015-01-02T05:53:18.000Z to 2022-03-31T17:24:21.000Z) |
import FWCore.ParameterSet.Config as cms
L1MuGMTParametersOnlineProducer = cms.ESProducer("L1MuGMTParametersOnlineProducer",
onlineDB = cms.string("oracle://CMS_OMDS_LB/CMS_TRG_R"),
onlineAuthentication = cms.string("."),
forceGeneration = cms.bool(False),
ignoreVersionMismatch = cms.bool(False)
)
| avg_line_length: 50.666667 | max_line_length: 95 | alphanum_fraction: 0.530702 |
| hexsha: 8cf69f5e3dd744e63398bad0dba62d70b506fb91 | size: 4,755 | ext: py | lang: Python |
| path: kube_hunter/modules/discovery/apiserver.py | repo: austinhuntinghpe/kube-hunter | head_hexsha: 81ba0bf52781fe44f650aa90837416a15174d46b | licenses: ["Apache-2.0"] |
| max_stars_count: 1 (2021-09-13T21:52:52.000Z to 2021-09-13T21:52:52.000Z) | max_issues_count: 2 (2021-05-20T20:17:17.000Z to 2022-02-26T09:20:16.000Z) | max_forks_count: 1 (2020-07-29T07:50:10.000Z to 2020-07-29T07:50:10.000Z) |
import json
import requests
import logging
from kube_hunter.core.types import Discovery
from kube_hunter.core.events import handler
from kube_hunter.core.events.types import OpenPortEvent, Service, Event, EventFilterBase
KNOWN_API_PORTS = [443, 6443, 8080]
class K8sApiService(Service, Event):
"""A Kubernetes API service"""
def __init__(self, protocol="https"):
Service.__init__(self, name="Unrecognized K8s API")
self.protocol = protocol
class ApiServer(Service, Event):
"""The API server is in charge of all operations on the cluster."""
def __init__(self):
Service.__init__(self, name="API Server")
self.protocol = "https"
class MetricsServer(Service, Event):
"""The Metrics server is in charge of providing resource usage metrics for pods and nodes to the API server."""
def __init__(self):
Service.__init__(self, name="Metrics Server")
self.protocol = "https"
# Other devices could have this port open, but we can check to see if it looks like a Kubernetes api
# A Kubernetes API service will respond with a JSON message that includes a "code" field for the HTTP status code
@handler.subscribe(OpenPortEvent, predicate=lambda x: x.port in KNOWN_API_PORTS)
class ApiServiceDiscovery(Discovery):
"""API Service Discovery
Checks for the existence of K8s API Services
"""
def __init__(self, event):
self.event = event
self.session = requests.Session()
self.session.verify = False
def execute(self):
logging.debug("Attempting to discover an API service on {}:{}".format(self.event.host, self.event.port))
protocols = ["http", "https"]
for protocol in protocols:
if self.has_api_behaviour(protocol):
self.publish_event(K8sApiService(protocol))
def has_api_behaviour(self, protocol):
try:
r = self.session.get("{}://{}:{}".format(protocol, self.event.host, self.event.port))
if ('k8s' in r.text) or ('"code"' in r.text and r.status_code != 200):
return True
except requests.exceptions.SSLError:
logging.debug("{} protocol not accepted on {}:{}".format(protocol, self.event.host, self.event.port))
except Exception as e:
logging.debug("{} on {}:{}".format(e, self.event.host, self.event.port))
# Acts as a Filter for services, In the case that we can classify the API,
# We swap the filtered event with a new corresponding Service to next be published
# The classification can be regarding the context of the execution,
# Currently we classify: Metrics Server and Api Server
# If running as a pod:
# We know the Api server IP, so we can classify easily
# If not:
# We determine by accessing the /version on the service.
# Api Server will contain a major version field, while the Metrics will not
@handler.subscribe(K8sApiService)
class ApiServiceClassify(EventFilterBase):
"""API Service Classifier
Classifies an API service
"""
def __init__(self, event):
self.event = event
self.classified = False
self.session = requests.Session()
self.session.verify = False
# Using the auth token if we can, for the case that authentication is needed for our checks
if self.event.auth_token:
self.session.headers.update({"Authorization": "Bearer {}".format(self.event.auth_token)})
def classify_using_version_endpoint(self):
"""Tries to classify by accessing /version. if could not access succeded, returns"""
try:
r = self.session.get("{}://{}:{}/version".format(self.event.protocol, self.event.host, self.event.port))
versions = r.json()
if 'major' in versions:
if versions.get('major') == "":
self.event = MetricsServer()
else:
self.event = ApiServer()
except Exception as e:
logging.exception("Could not access /version on API service")
def execute(self):
discovered_protocol = self.event.protocol
# if running as pod
if self.event.kubeservicehost:
# if the host is the api server's IP, we know it's the Api Server
if self.event.kubeservicehost == str(self.event.host):
self.event = ApiServer()
else:
self.event = MetricsServer()
# if not running as pod.
else:
self.classify_using_version_endpoint()
# in any case, making sure to link previously discovered protocol
self.event.protocol = discovered_protocol
# If some check classified the Service,
# the event will have been replaced.
return self.event
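# Illustrative helper (not part of kube-hunter): the /version classification rule
# used by classify_using_version_endpoint() above, extracted as a pure function.
def classify_version_payload(versions):
    """Return 'ApiServer' for a non-empty 'major' field, 'MetricsServer' for an
    empty one, and 'Unclassified' when the field is absent."""
    if 'major' not in versions:
        return 'Unclassified'
    return 'MetricsServer' if versions.get('major') == "" else 'ApiServer'

assert classify_version_payload({'major': '1', 'minor': '21'}) == 'ApiServer'
assert classify_version_payload({'major': ''}) == 'MetricsServer'
assert classify_version_payload({'kind': 'metrics'}) == 'Unclassified'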
| avg_line_length: 41.347826 | max_line_length: 116 | alphanum_fraction: 0.659096 |
| hexsha: aa8c60e9e0dfdbab2e7fe42a967da58d82cf86a8 | size: 324 | ext: py | lang: Python |
| path: owtf/plugins/network/active/smb@PTES-009.py | repo: alienus/owtf | head_hexsha: b6d81fac83c324c2b8c6fe2a974c036881c1fcd0 | licenses: ["BSD-3-Clause"] |
| max_stars_count: null | max_issues_count: null | max_forks_count: null |
"""
Plugin for probing SMB
"""
from owtf.managers.resource import get_resources
from owtf.plugin.plugin_helper import plugin_helper
DESCRIPTION = " SMB Probing "
def run(PluginInfo):
resource = get_resources('SmbProbeMethods')
return plugin_helper.CommandDump('Test Command', 'Output', resource, PluginInfo, [])
| avg_line_length: 23.142857 | max_line_length: 88 | alphanum_fraction: 0.759259 |
| hexsha: a7e992e2831e2f244380457fb13b077ec1ae2ffe | size: 2,425 | ext: py | lang: Python |
| path: src/example.py | repo: RedSpiderMkV/FinanceMarketDataGrabber | head_hexsha: f6dc916471a73a6dd86aac0776c6dfa56fa1f6f5 | licenses: ["MIT"] |
| max_stars_count: 46 (2015-10-15T21:34:40.000Z to 2022-03-27T13:42:43.000Z) | max_issues_count: null | max_forks_count: 19 (2015-10-15T21:35:52.000Z to 2020-02-26T15:08:54.000Z) |
#!/usr/bin/python
#-------------------------------------------------------------------------------
# Name: ..
# Purpose:
#
# Author: RedSpiderMkV
#
# Created: 15/10/2015
# Copyright: (c) RedSpiderMkV 2015
# Licence: <your licence>
#-------------------------------------------------------------------------------
import FinanceDataLib.YahooFinance.YahooApi as YahooApi
from FinanceDataLib.YahooFinance.YahooApi_Symbols import *
import FinanceDataLib.GoogleFinance.GoogleApi as GoogleApi
from FinanceDataLib.GoogleFinance.GoogleApi_Symbols import *
import HistoricDataLib.HistoricalQuote_GoogleAPI as HistoricalQuote_GoogleAPI
import HistoricDataLib.HistoricalQuote_YahooAPI as HistoricalQuote_YahooAPI
stockList = ('MSFT', 'AAPL', 'BARC.L', '^FTSE', '^FTAI', '^NYA')
forexList = ('EURUSD=X', 'GBPUSD=X', 'EURGBP=X', 'USDJPY=X', 'BTCGBP=x')
def printLines(lst):
for line in lst:
parts = line.split(',')
p = ''
for part in parts:
p = p + part.ljust(8) + '\t'
print(p)
def printYahooQuotes():
financeApi = YahooApi.yahooFinance()
symbolList = (SymbolInfo.Symbol, SymbolInfo.StockExchange, SymbolInfo.Name, \
Pricing.Ask, Pricing.Bid, Pricing.LastTradeWithTime)
print(financeApi.GetData(stockList, symbolList))
print(financeApi.GetData(forexList, symbolList))
def printGoogleQuotes():
api = GoogleApi.GoogleFinanceApi()
symbolList = (GoogleStockSymbols.ID, GoogleStockSymbols.Index, \
GoogleStockSymbols.StockSymbol, GoogleStockSymbols.LastTradePrice)
print(api.GetStockData(stockList, symbolList))
def printHistoricQuotesGoogle():
data = HistoricalQuote_GoogleAPI.HistoricalQuote()
printLines(data.GetData('NASDAQ:AAPL', '6-18-2014', '6-19-2014'))
def printHistoricQuotesYahoo():
data = HistoricalQuote_YahooAPI.HistoricalQuote()
printLines(data.GetData('AAPL', '6-18-14', '6-19-14'))
#printLines(data.GetData('^FTSE', '01-01-16', '11-01-16'))
def main():
print('Yahoo Quotes')
print('------------')
printYahooQuotes()
print('Google Quotes')
print('------------')
printGoogleQuotes()
print('')
print('Yahoo Historic Quotes')
print('------------')
printHistoricQuotesYahoo()
print('')
print('Google Historic Quotes')
print('------------')
printHistoricQuotesGoogle()
if __name__ == '__main__':
main()
| avg_line_length: 29.938272 | max_line_length: 81 | alphanum_fraction: 0.629691 |
| hexsha: 7b9385e7a59d2677a3ef6851a572001312b36886 | size: 9,933 | ext: py | lang: Python |
| path: qiskit/aqua/components/uncertainty_problems/multivariate_problem.py | repo: chunfuchen/aqua | head_hexsha: fde435203a2799433a4e50897554fa226c8ff1dc | licenses: ["Apache-2.0"] |
| max_stars_count: null | max_issues_count: null | max_forks_count: 2 (2020-02-13T02:17:58.000Z to 2020-08-09T07:56:25.000Z) |
# -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2019.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
Multivariate Uncertainty Problem.
"""
import numpy as np
from qiskit.aqua.components.uncertainty_problems import UncertaintyProblem
class MultivariateProblem(UncertaintyProblem):
"""
Multivariate Uncertainty Problem.
"""
def __init__(self,
uncertainty_model,
aggregation_function,
univariate_objective,
conditions=None):
"""
Constructor.
Args:
uncertainty_model (MultivariateDistribution): multivariate uncertainty model
aggregation_function (CircuitFactory): aggregation function that maps
the multiple dimension to an aggregated value
univariate_objective (UnivariatePiecewiseLinearObjective): objective
function applied to the aggregated value
conditions (Union(list, numpy.ndarray)): list of pairs (int, CircuitFactory) =
target dimension of uncertainty model and condition to be satisfied
to apply the aggregation
"""
# determine number of target qubits
num_target_qubits = uncertainty_model.num_target_qubits + 1
super().__init__(num_target_qubits)
# set qubit indices for
self.i_state = list(range(num_target_qubits - 1))
self.i_objective = num_target_qubits - 1
# store parameters
self._uncertainty_model = uncertainty_model
self._aggregation_function = aggregation_function
self._univariate_objective = univariate_objective
self._conditions = conditions
def value_to_estimation(self, value):
if hasattr(self._univariate_objective, 'value_to_estimation'):
return self._univariate_objective.value_to_estimation(value)
else:
return value
def required_ancillas(self):
num_condition_ancillas = 0
num_condition_target_ancillas = 0
num_aggregation_ancillas = self._aggregation_function.required_ancillas()
if self._conditions is not None:
num_condition_target_ancillas = len(self._conditions) + 1*(len(self._conditions) > 1)
num_aggregation_ancillas = self._aggregation_function.required_ancillas_controlled()
if self._conditions is not None:
for _, condition in self._conditions:
num_condition_ancillas = np.maximum(num_condition_ancillas,
condition.required_ancillas())
# get maximal number of required ancillas
num_ancillas = max([self._uncertainty_model.required_ancillas(),
num_aggregation_ancillas,
self._univariate_objective.required_ancillas(),
num_condition_ancillas])
# add ancillas that are required to compute intermediate
# states and are not directly uncomputed
num_ancillas += self._aggregation_function.num_sum_qubits
num_ancillas += num_condition_target_ancillas
return num_ancillas
def build(self, qc, q, q_ancillas=None, params=None):
# apply uncertainty model (can use all ancillas and returns all clean)
q_state = [q[i] for i in self.i_state]
self._uncertainty_model.build(qc, q_state, q_ancillas)
qc.barrier()
# get all qubits up to the largest state qubit
num_agg_qubits = self._aggregation_function.num_sum_qubits
q_agg_in = q_state
q_agg_out = [q_ancillas[i] for i in range(num_agg_qubits)]
q_agg = q_agg_in + q_agg_out
# determine objective qubits (aggregation qubits + objective qubit)
q_obj = q_agg_out + [q[self.i_objective]]
# set condition target qubits
if self._conditions:
i_cond_start = num_agg_qubits
i_cond_end = i_cond_start + len(self._conditions) + 1*(len(self._conditions) > 1)
q_cond_target = [q_ancillas[i] for i in range(i_cond_start, i_cond_end)]
# set remaining ancillas
remaining_ancillas_start = i_cond_end
else:
# set remaining ancillas
remaining_ancillas_start = num_agg_qubits
q_rem_ancillas = [q_ancillas[i] for i in range(remaining_ancillas_start, len(q_ancillas))]
# apply controlled or uncontrolled aggregation
if not self._conditions:
# apply aggregation
self._aggregation_function.build(qc, q_agg, q_rem_ancillas)
qc.barrier()
# apply objective function
self._univariate_objective.build(qc, q_obj, q_rem_ancillas)
qc.barrier()
# uncompute aggregation (all ancillas should be clean again now)
self._aggregation_function.build_inverse(qc, q_agg, q_rem_ancillas)
qc.barrier()
else:
if len(self._conditions) == 1:
dimension = self._conditions[0][0]
condition = self._conditions[0][1]
i_condition_in_start = \
np.cumsum(self._uncertainty_model.num_qubits)[dimension] - \
self._uncertainty_model.num_qubits[dimension]
i_condition_in_end = np.cumsum(self._uncertainty_model.num_qubits)[dimension]
q_condition_in = \
[q_state[i] for i in range(i_condition_in_start, i_condition_in_end)]
q_condition = q_condition_in + [q_cond_target[0]]
condition.build(qc, q_condition, q_rem_ancillas)
qc.barrier()
# apply aggregation
self._aggregation_function.build_controlled(qc,
q_agg,
q_cond_target[0],
q_rem_ancillas,
use_basis_gates=False)
qc.barrier()
# apply objective function
self._univariate_objective.build(qc, q_obj, q_rem_ancillas)
qc.barrier()
# uncompute aggregation (all ancillas should be clean again now)
self._aggregation_function.build_controlled_inverse(qc,
q_agg,
q_cond_target[0],
q_rem_ancillas,
use_basis_gates=False)
qc.barrier()
# uncompute condition
condition.build_inverse(qc, q_condition, q_rem_ancillas)
else:
for j in range(len(self._conditions)):
dimension = self._conditions[j][0]
condition = self._conditions[j][1]
i_condition_in_start = \
np.cumsum(self._uncertainty_model.num_qubits)[dimension] - \
self._uncertainty_model.num_qubits[dimension]
i_condition_in_end = np.cumsum(self._uncertainty_model.num_qubits)[dimension]
q_condition_in = \
[q_state[i] for i in range(i_condition_in_start, i_condition_in_end)]
q_condition = q_condition_in + [q_cond_target[j]]
condition.build(qc, q_condition, q_rem_ancillas)
qc.mct(q_cond_target[:-1], q_cond_target[-1], q_rem_ancillas)
qc.barrier()
# apply aggregation
self._aggregation_function.build_controlled(qc,
q_agg,
q_cond_target[-1],
q_rem_ancillas, use_basis_gates=False)
qc.barrier()
# apply objective function
self._univariate_objective.build(qc, q_obj, q_rem_ancillas)
qc.barrier()
# uncompute aggregation (all ancillas should be clean again now)
self._aggregation_function.build_controlled_inverse(qc, q_agg, q_cond_target[-1],
q_rem_ancillas,
use_basis_gates=False)
qc.barrier()
qc.mct(q_cond_target[:-1], q_cond_target[-1], q_rem_ancillas)
# uncompute condition
for j in range(len(self._conditions)):
dimension = self._conditions[j][0]
condition = self._conditions[j][1]
i_condition_in_start = \
np.cumsum(self._uncertainty_model.num_qubits)[dimension] - \
self._uncertainty_model.num_qubits[dimension]
i_condition_in_end = np.cumsum(self._uncertainty_model.num_qubits)[dimension]
q_condition_in = \
[q_state[i] for i in range(i_condition_in_start, i_condition_in_end)]
q_condition = q_condition_in + [q_cond_target[j]]
condition.build_inverse(qc, q_condition, q_rem_ancillas)
| 39.891566
| 98
| 0.574852
|
08ec0e6b34b934e46714352153db85486079466b
| 554
|
py
|
Python
|
waespk/core/migrations/0027_auto_20160603_1718.py
|
spketoundi/CamODI
|
f5df22bf6a771278294e7af21147d61e5cd19bf5
|
[
"MIT"
] | null | null | null |
waespk/core/migrations/0027_auto_20160603_1718.py
|
spketoundi/CamODI
|
f5df22bf6a771278294e7af21147d61e5cd19bf5
|
[
"MIT"
] | null | null | null |
waespk/core/migrations/0027_auto_20160603_1718.py
|
spketoundi/CamODI
|
f5df22bf6a771278294e7af21147d61e5cd19bf5
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-06-03 16:18
from __future__ import unicode_literals
from django.db import migrations
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
dependencies = [
('ossuo', '0026_merge'),
]
operations = [
migrations.AlterField(
model_name='blogpage',
name='intro',
field=wagtail.wagtailcore.fields.RichTextField(blank=True, verbose_name='Intro (used for blog index and Planet Drupal listings)'),
),
]
| 25.181818
| 142
| 0.65343
|
77339469ac06ead22cde86065cd53b1258155a50
| 243
|
py
|
Python
|
tests/test_hello.py
|
SweptSource/python-example
|
720656b15fa57472b885feb32d3735c336398411
|
[
"MIT"
] | null | null | null |
tests/test_hello.py
|
SweptSource/python-example
|
720656b15fa57472b885feb32d3735c336398411
|
[
"MIT"
] | null | null | null |
tests/test_hello.py
|
SweptSource/python-example
|
720656b15fa57472b885feb32d3735c336398411
|
[
"MIT"
] | null | null | null |
import hello
def test_says_world():
assert hello.say_what() == 'world'
def test_empty():
    assert len(hello.say_what()) != 0
def test_1():
assert 1==1
def test_2():
assert 2+2==4
def test_null():
assert 2+2!=None
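# Hypothetical sketch (not part of the original test file) of a minimal `hello`
# module that would satisfy the assertions above:
#
#     # hello.py
#     def say_what():
#         return 'world'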
| 13.5
| 38
| 0.604938
|
415071f9c2019fbe30136c3fdb5edbee585d2b54
| 823
|
py
|
Python
|
table.py
|
misterpah/ldtp_adapter
|
df9cd0186cf51174ea131943f8547a9df079035b
|
[
"MIT"
] | null | null | null |
table.py
|
misterpah/ldtp_adapter
|
df9cd0186cf51174ea131943f8547a9df079035b
|
[
"MIT"
] | null | null | null |
table.py
|
misterpah/ldtp_adapter
|
df9cd0186cf51174ea131943f8547a9df079035b
|
[
"MIT"
] | null | null | null |
def checkrow(window,object):
log("{} :not implemented yet".format(sys._getframe().f_code.co_name))
handle = getobjecthandle(window,object)
if pyacc.checkObjectState(handle['handle'],pyatspi.STATE_CHECKED) == False:
click(window,object)
else:
mousemove(window,object)
return pyacc.checkObjectState(handle['handle'],pyatspi.STATE_CHECKED)
def uncheckrow(window,object):
log("{} :not implemented yet".format(sys._getframe().f_code.co_name))
handle = getobjecthandle(window,object)
if pyacc.checkObjectState(handle['handle'],pyatspi.STATE_CHECKED) == True:
click(window,object)
else:
mousemove(window,object)
ret = True
result = pyacc.checkObjectState(handle['handle'],pyatspi.STATE_CHECKED)
if result == True:
ret = False
return ret
| 37.409091
| 79
| 0.696233
|
6b0fc7b692eaef756ba08b61762d7575c855d5d2
| 29,696
|
py
|
Python
|
homeassistant/components/zwave_js/config_flow.py
|
switschel/core
|
0ecca246bdc3028c30bf8ccbf2b4c7f2a8b3f9aa
|
[
"Apache-2.0"
] | null | null | null |
homeassistant/components/zwave_js/config_flow.py
|
switschel/core
|
0ecca246bdc3028c30bf8ccbf2b4c7f2a8b3f9aa
|
[
"Apache-2.0"
] | 36
|
2020-07-19T13:24:30.000Z
|
2022-03-31T06:02:01.000Z
|
homeassistant/components/zwave_js/config_flow.py
|
switschel/core
|
0ecca246bdc3028c30bf8ccbf2b4c7f2a8b3f9aa
|
[
"Apache-2.0"
] | 1
|
2020-12-13T08:27:33.000Z
|
2020-12-13T08:27:33.000Z
|
"""Config flow for Z-Wave JS integration."""
from __future__ import annotations
from abc import abstractmethod
import asyncio
import logging
from typing import Any
import aiohttp
from async_timeout import timeout
import voluptuous as vol
from zwave_js_server.version import VersionInfo, get_server_version
from homeassistant import config_entries, exceptions
from homeassistant.components import usb
from homeassistant.components.hassio import is_hassio
from homeassistant.const import CONF_NAME, CONF_URL
from homeassistant.core import HomeAssistant, callback
from homeassistant.data_entry_flow import (
AbortFlow,
FlowHandler,
FlowManager,
FlowResult,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from . import disconnect_client
from .addon import AddonError, AddonInfo, AddonManager, AddonState, get_addon_manager
from .const import (
CONF_ADDON_DEVICE,
CONF_ADDON_EMULATE_HARDWARE,
CONF_ADDON_LOG_LEVEL,
CONF_ADDON_NETWORK_KEY,
CONF_INTEGRATION_CREATED_ADDON,
CONF_NETWORK_KEY,
CONF_USB_PATH,
CONF_USE_ADDON,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
DEFAULT_URL = "ws://localhost:3000"
TITLE = "Z-Wave JS"
ADDON_SETUP_TIMEOUT = 5
ADDON_SETUP_TIMEOUT_ROUNDS = 4
CONF_EMULATE_HARDWARE = "emulate_hardware"
CONF_LOG_LEVEL = "log_level"
SERVER_VERSION_TIMEOUT = 10
ADDON_LOG_LEVELS = {
"error": "Error",
"warn": "Warn",
"info": "Info",
"verbose": "Verbose",
"debug": "Debug",
"silly": "Silly",
}
ADDON_USER_INPUT_MAP = {
CONF_ADDON_DEVICE: CONF_USB_PATH,
CONF_ADDON_NETWORK_KEY: CONF_NETWORK_KEY,
CONF_ADDON_LOG_LEVEL: CONF_LOG_LEVEL,
CONF_ADDON_EMULATE_HARDWARE: CONF_EMULATE_HARDWARE,
}
ON_SUPERVISOR_SCHEMA = vol.Schema({vol.Optional(CONF_USE_ADDON, default=True): bool})
def get_manual_schema(user_input: dict[str, Any]) -> vol.Schema:
"""Return a schema for the manual step."""
default_url = user_input.get(CONF_URL, DEFAULT_URL)
return vol.Schema({vol.Required(CONF_URL, default=default_url): str})
def get_on_supervisor_schema(user_input: dict[str, Any]) -> vol.Schema:
"""Return a schema for the on Supervisor step."""
default_use_addon = user_input[CONF_USE_ADDON]
return vol.Schema({vol.Optional(CONF_USE_ADDON, default=default_use_addon): bool})
async def validate_input(hass: HomeAssistant, user_input: dict) -> VersionInfo:
"""Validate if the user input allows us to connect."""
ws_address = user_input[CONF_URL]
if not ws_address.startswith(("ws://", "wss://")):
raise InvalidInput("invalid_ws_url")
try:
return await async_get_version_info(hass, ws_address)
except CannotConnect as err:
raise InvalidInput("cannot_connect") from err
async def async_get_version_info(hass: HomeAssistant, ws_address: str) -> VersionInfo:
"""Return Z-Wave JS version info."""
try:
async with timeout(SERVER_VERSION_TIMEOUT):
version_info: VersionInfo = await get_server_version(
ws_address, async_get_clientsession(hass)
)
except (asyncio.TimeoutError, aiohttp.ClientError) as err:
# We don't want to spam the log if the add-on isn't started
# or takes a long time to start.
_LOGGER.debug("Failed to connect to Z-Wave JS server: %s", err)
raise CannotConnect from err
return version_info
class BaseZwaveJSFlow(FlowHandler):
"""Represent the base config flow for Z-Wave JS."""
def __init__(self) -> None:
"""Set up flow instance."""
self.network_key: str | None = None
self.usb_path: str | None = None
self.ws_address: str | None = None
self.restart_addon: bool = False
# If we install the add-on we should uninstall it on entry remove.
self.integration_created_addon = False
self.install_task: asyncio.Task | None = None
self.start_task: asyncio.Task | None = None
self.version_info: VersionInfo | None = None
@property
@abstractmethod
def flow_manager(self) -> FlowManager:
"""Return the flow manager of the flow."""
async def async_step_install_addon(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Install Z-Wave JS add-on."""
if not self.install_task:
self.install_task = self.hass.async_create_task(self._async_install_addon())
return self.async_show_progress(
step_id="install_addon", progress_action="install_addon"
)
try:
await self.install_task
except AddonError as err:
self.install_task = None
_LOGGER.error(err)
return self.async_show_progress_done(next_step_id="install_failed")
self.integration_created_addon = True
self.install_task = None
return self.async_show_progress_done(next_step_id="configure_addon")
async def async_step_install_failed(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Add-on installation failed."""
return self.async_abort(reason="addon_install_failed")
async def async_step_start_addon(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Start Z-Wave JS add-on."""
if not self.start_task:
self.start_task = self.hass.async_create_task(self._async_start_addon())
return self.async_show_progress(
step_id="start_addon", progress_action="start_addon"
)
try:
await self.start_task
except (CannotConnect, AddonError, AbortFlow) as err:
self.start_task = None
_LOGGER.error(err)
return self.async_show_progress_done(next_step_id="start_failed")
self.start_task = None
return self.async_show_progress_done(next_step_id="finish_addon_setup")
async def async_step_start_failed(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Add-on start failed."""
return self.async_abort(reason="addon_start_failed")
async def _async_start_addon(self) -> None:
"""Start the Z-Wave JS add-on."""
addon_manager: AddonManager = get_addon_manager(self.hass)
self.version_info = None
try:
if self.restart_addon:
await addon_manager.async_schedule_restart_addon()
else:
await addon_manager.async_schedule_start_addon()
# Sleep some seconds to let the add-on start properly before connecting.
for _ in range(ADDON_SETUP_TIMEOUT_ROUNDS):
await asyncio.sleep(ADDON_SETUP_TIMEOUT)
try:
if not self.ws_address:
discovery_info = await self._async_get_addon_discovery_info()
self.ws_address = (
f"ws://{discovery_info['host']}:{discovery_info['port']}"
)
self.version_info = await async_get_version_info(
self.hass, self.ws_address
)
except (AbortFlow, CannotConnect) as err:
_LOGGER.debug(
"Add-on not ready yet, waiting %s seconds: %s",
ADDON_SETUP_TIMEOUT,
err,
)
else:
break
else:
raise CannotConnect("Failed to start Z-Wave JS add-on: timeout")
finally:
# Continue the flow after show progress when the task is done.
self.hass.async_create_task(
self.flow_manager.async_configure(flow_id=self.flow_id)
)
@abstractmethod
async def async_step_configure_addon(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Ask for config for Z-Wave JS add-on."""
@abstractmethod
async def async_step_finish_addon_setup(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Prepare info needed to complete the config entry.
Get add-on discovery info and server version info.
Set unique id and abort if already configured.
"""
async def _async_get_addon_info(self) -> AddonInfo:
"""Return and cache Z-Wave JS add-on info."""
addon_manager: AddonManager = get_addon_manager(self.hass)
try:
addon_info: AddonInfo = await addon_manager.async_get_addon_info()
except AddonError as err:
_LOGGER.error(err)
raise AbortFlow("addon_info_failed") from err
return addon_info
async def _async_set_addon_config(self, config: dict) -> None:
"""Set Z-Wave JS add-on config."""
addon_manager: AddonManager = get_addon_manager(self.hass)
try:
await addon_manager.async_set_addon_options(config)
except AddonError as err:
_LOGGER.error(err)
raise AbortFlow("addon_set_config_failed") from err
async def _async_install_addon(self) -> None:
"""Install the Z-Wave JS add-on."""
addon_manager: AddonManager = get_addon_manager(self.hass)
try:
await addon_manager.async_schedule_install_addon()
finally:
# Continue the flow after show progress when the task is done.
self.hass.async_create_task(
self.flow_manager.async_configure(flow_id=self.flow_id)
)
async def _async_get_addon_discovery_info(self) -> dict:
"""Return add-on discovery info."""
addon_manager: AddonManager = get_addon_manager(self.hass)
try:
discovery_info_config = await addon_manager.async_get_addon_discovery_info()
except AddonError as err:
_LOGGER.error(err)
raise AbortFlow("addon_get_discovery_info_failed") from err
return discovery_info_config
class ConfigFlow(BaseZwaveJSFlow, config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Z-Wave JS."""
VERSION = 1
def __init__(self) -> None:
"""Set up flow instance."""
super().__init__()
self.use_addon = False
self._title: str | None = None
@property
def flow_manager(self) -> config_entries.ConfigEntriesFlowManager:
"""Return the correct flow manager."""
return self.hass.config_entries.flow
@staticmethod
@callback
def async_get_options_flow(
config_entry: config_entries.ConfigEntry,
) -> OptionsFlowHandler:
"""Return the options flow."""
return OptionsFlowHandler(config_entry)
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle the initial step."""
if is_hassio(self.hass):
return await self.async_step_on_supervisor()
return await self.async_step_manual()
async def async_step_usb(self, discovery_info: dict[str, str]) -> FlowResult:
"""Handle USB Discovery."""
if not is_hassio(self.hass):
return self.async_abort(reason="discovery_requires_supervisor")
if self._async_current_entries():
return self.async_abort(reason="already_configured")
if self._async_in_progress():
return self.async_abort(reason="already_in_progress")
vid = discovery_info["vid"]
pid = discovery_info["pid"]
serial_number = discovery_info["serial_number"]
device = discovery_info["device"]
manufacturer = discovery_info["manufacturer"]
description = discovery_info["description"]
# The Nortek sticks are a special case since they
# have a Z-Wave and a Zigbee radio. We need to reject
# the Zigbee radio.
if vid == "10C4" and pid == "8A2A" and "Z-Wave" not in description:
return self.async_abort(reason="not_zwave_device")
# Zooz uses this vid/pid, but so do 2652 sticks
if vid == "10C4" and pid == "EA60" and "2652" in description:
return self.async_abort(reason="not_zwave_device")
addon_info = await self._async_get_addon_info()
if addon_info.state not in (AddonState.NOT_INSTALLED, AddonState.NOT_RUNNING):
return self.async_abort(reason="already_configured")
await self.async_set_unique_id(
f"{vid}:{pid}_{serial_number}_{manufacturer}_{description}"
)
self._abort_if_unique_id_configured()
dev_path = await self.hass.async_add_executor_job(usb.get_serial_by_id, device)
self.usb_path = dev_path
self._title = usb.human_readable_device_name(
dev_path,
serial_number,
manufacturer,
description,
vid,
pid,
)
self.context["title_placeholders"] = {CONF_NAME: self._title}
return await self.async_step_usb_confirm()
async def async_step_usb_confirm(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle USB Discovery confirmation."""
if user_input is None:
return self.async_show_form(
step_id="usb_confirm",
description_placeholders={CONF_NAME: self._title},
data_schema=vol.Schema({}),
)
return await self.async_step_on_supervisor({CONF_USE_ADDON: True})
async def async_step_manual(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle a manual configuration."""
if user_input is None:
return self.async_show_form(
step_id="manual", data_schema=get_manual_schema({})
)
errors = {}
try:
version_info = await validate_input(self.hass, user_input)
except InvalidInput as err:
errors["base"] = err.error
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(
version_info.home_id, raise_on_progress=False
)
# Make sure we disable any add-on handling
# if the controller is reconfigured in a manual step.
self._abort_if_unique_id_configured(
updates={
**user_input,
CONF_USE_ADDON: False,
CONF_INTEGRATION_CREATED_ADDON: False,
}
)
self.ws_address = user_input[CONF_URL]
return self._async_create_entry_from_vars()
return self.async_show_form(
step_id="manual", data_schema=get_manual_schema(user_input), errors=errors
)
async def async_step_hassio(self, discovery_info: dict[str, Any]) -> FlowResult:
"""Receive configuration from add-on discovery info.
This flow is triggered by the Z-Wave JS add-on.
"""
if self._async_in_progress():
return self.async_abort(reason="already_in_progress")
self.ws_address = f"ws://{discovery_info['host']}:{discovery_info['port']}"
try:
version_info = await async_get_version_info(self.hass, self.ws_address)
except CannotConnect:
return self.async_abort(reason="cannot_connect")
await self.async_set_unique_id(version_info.home_id)
self._abort_if_unique_id_configured(updates={CONF_URL: self.ws_address})
return await self.async_step_hassio_confirm()
async def async_step_hassio_confirm(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Confirm the add-on discovery."""
if user_input is not None:
return await self.async_step_on_supervisor(
user_input={CONF_USE_ADDON: True}
)
return self.async_show_form(step_id="hassio_confirm")
async def async_step_on_supervisor(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle logic when on Supervisor host."""
if user_input is None:
return self.async_show_form(
step_id="on_supervisor", data_schema=ON_SUPERVISOR_SCHEMA
)
if not user_input[CONF_USE_ADDON]:
return await self.async_step_manual()
self.use_addon = True
addon_info = await self._async_get_addon_info()
if addon_info.state == AddonState.RUNNING:
addon_config = addon_info.options
self.usb_path = addon_config[CONF_ADDON_DEVICE]
self.network_key = addon_config.get(CONF_ADDON_NETWORK_KEY, "")
return await self.async_step_finish_addon_setup()
if addon_info.state == AddonState.NOT_RUNNING:
return await self.async_step_configure_addon()
return await self.async_step_install_addon()
async def async_step_configure_addon(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Ask for config for Z-Wave JS add-on."""
addon_info = await self._async_get_addon_info()
addon_config = addon_info.options
if user_input is not None:
self.network_key = user_input[CONF_NETWORK_KEY]
self.usb_path = user_input[CONF_USB_PATH]
new_addon_config = {
**addon_config,
CONF_ADDON_DEVICE: self.usb_path,
CONF_ADDON_NETWORK_KEY: self.network_key,
}
if new_addon_config != addon_config:
await self._async_set_addon_config(new_addon_config)
return await self.async_step_start_addon()
usb_path = addon_config.get(CONF_ADDON_DEVICE) or self.usb_path or ""
network_key = addon_config.get(CONF_ADDON_NETWORK_KEY, self.network_key or "")
data_schema = vol.Schema(
{
vol.Required(CONF_USB_PATH, default=usb_path): str,
vol.Optional(CONF_NETWORK_KEY, default=network_key): str,
}
)
return self.async_show_form(step_id="configure_addon", data_schema=data_schema)
async def async_step_finish_addon_setup(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Prepare info needed to complete the config entry.
Get add-on discovery info and server version info.
Set unique id and abort if already configured.
"""
if not self.ws_address:
discovery_info = await self._async_get_addon_discovery_info()
self.ws_address = f"ws://{discovery_info['host']}:{discovery_info['port']}"
if not self.unique_id or self.context["source"] == config_entries.SOURCE_USB:
if not self.version_info:
try:
self.version_info = await async_get_version_info(
self.hass, self.ws_address
)
except CannotConnect as err:
raise AbortFlow("cannot_connect") from err
await self.async_set_unique_id(
self.version_info.home_id, raise_on_progress=False
)
self._abort_if_unique_id_configured(
updates={
CONF_URL: self.ws_address,
CONF_USB_PATH: self.usb_path,
CONF_NETWORK_KEY: self.network_key,
}
)
return self._async_create_entry_from_vars()
@callback
def _async_create_entry_from_vars(self) -> FlowResult:
"""Return a config entry for the flow."""
# Abort any other flows that may be in progress
for progress in self._async_in_progress():
self.hass.config_entries.flow.async_abort(progress["flow_id"])
return self.async_create_entry(
title=TITLE,
data={
CONF_URL: self.ws_address,
CONF_USB_PATH: self.usb_path,
CONF_NETWORK_KEY: self.network_key,
CONF_USE_ADDON: self.use_addon,
CONF_INTEGRATION_CREATED_ADDON: self.integration_created_addon,
},
)
class OptionsFlowHandler(BaseZwaveJSFlow, config_entries.OptionsFlow):
"""Handle an options flow for Z-Wave JS."""
def __init__(self, config_entry: config_entries.ConfigEntry) -> None:
"""Set up the options flow."""
super().__init__()
self.config_entry = config_entry
self.original_addon_config: dict[str, Any] | None = None
self.revert_reason: str | None = None
@property
def flow_manager(self) -> config_entries.OptionsFlowManager:
"""Return the correct flow manager."""
return self.hass.config_entries.options
@callback
def _async_update_entry(self, data: dict[str, Any]) -> None:
"""Update the config entry with new data."""
self.hass.config_entries.async_update_entry(self.config_entry, data=data)
async def async_step_init(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Manage the options."""
if is_hassio(self.hass):
return await self.async_step_on_supervisor()
return await self.async_step_manual()
async def async_step_manual(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle a manual configuration."""
if user_input is None:
return self.async_show_form(
step_id="manual",
data_schema=get_manual_schema(
{CONF_URL: self.config_entry.data[CONF_URL]}
),
)
errors = {}
try:
version_info = await validate_input(self.hass, user_input)
except InvalidInput as err:
errors["base"] = err.error
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
if self.config_entry.unique_id != version_info.home_id:
return self.async_abort(reason="different_device")
# Make sure we disable any add-on handling
# if the controller is reconfigured in a manual step.
self._async_update_entry(
{
**self.config_entry.data,
**user_input,
CONF_USE_ADDON: False,
CONF_INTEGRATION_CREATED_ADDON: False,
}
)
self.hass.async_create_task(
self.hass.config_entries.async_reload(self.config_entry.entry_id)
)
return self.async_create_entry(title=TITLE, data={})
return self.async_show_form(
step_id="manual", data_schema=get_manual_schema(user_input), errors=errors
)
async def async_step_on_supervisor(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle logic when on Supervisor host."""
if user_input is None:
return self.async_show_form(
step_id="on_supervisor",
data_schema=get_on_supervisor_schema(
{CONF_USE_ADDON: self.config_entry.data.get(CONF_USE_ADDON, True)}
),
)
if not user_input[CONF_USE_ADDON]:
return await self.async_step_manual()
addon_info = await self._async_get_addon_info()
if addon_info.state == AddonState.NOT_INSTALLED:
return await self.async_step_install_addon()
return await self.async_step_configure_addon()
async def async_step_configure_addon(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Ask for config for Z-Wave JS add-on."""
addon_info = await self._async_get_addon_info()
addon_config = addon_info.options
if user_input is not None:
self.network_key = user_input[CONF_NETWORK_KEY]
self.usb_path = user_input[CONF_USB_PATH]
new_addon_config = {
**addon_config,
CONF_ADDON_DEVICE: self.usb_path,
CONF_ADDON_NETWORK_KEY: self.network_key,
CONF_ADDON_LOG_LEVEL: user_input[CONF_LOG_LEVEL],
CONF_ADDON_EMULATE_HARDWARE: user_input[CONF_EMULATE_HARDWARE],
}
if new_addon_config != addon_config:
if addon_info.state == AddonState.RUNNING:
self.restart_addon = True
# Copy the add-on config to keep the objects separate.
self.original_addon_config = dict(addon_config)
await self._async_set_addon_config(new_addon_config)
if addon_info.state == AddonState.RUNNING and not self.restart_addon:
return await self.async_step_finish_addon_setup()
if (
self.config_entry.data.get(CONF_USE_ADDON)
and self.config_entry.state == config_entries.ConfigEntryState.LOADED
):
# Disconnect integration before restarting add-on.
await disconnect_client(self.hass, self.config_entry)
return await self.async_step_start_addon()
usb_path = addon_config.get(CONF_ADDON_DEVICE, self.usb_path or "")
network_key = addon_config.get(CONF_ADDON_NETWORK_KEY, self.network_key or "")
log_level = addon_config.get(CONF_ADDON_LOG_LEVEL, "info")
emulate_hardware = addon_config.get(CONF_ADDON_EMULATE_HARDWARE, False)
data_schema = vol.Schema(
{
vol.Required(CONF_USB_PATH, default=usb_path): str,
vol.Optional(CONF_NETWORK_KEY, default=network_key): str,
vol.Optional(CONF_LOG_LEVEL, default=log_level): vol.In(
ADDON_LOG_LEVELS
),
vol.Optional(CONF_EMULATE_HARDWARE, default=emulate_hardware): bool,
}
)
return self.async_show_form(step_id="configure_addon", data_schema=data_schema)
async def async_step_start_failed(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Add-on start failed."""
return await self.async_revert_addon_config(reason="addon_start_failed")
async def async_step_finish_addon_setup(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Prepare info needed to complete the config entry update.
Get add-on discovery info and server version info.
Check for same unique id and abort if not the same unique id.
"""
if self.revert_reason:
self.original_addon_config = None
reason = self.revert_reason
self.revert_reason = None
return await self.async_revert_addon_config(reason=reason)
if not self.ws_address:
discovery_info = await self._async_get_addon_discovery_info()
self.ws_address = f"ws://{discovery_info['host']}:{discovery_info['port']}"
if not self.version_info:
try:
self.version_info = await async_get_version_info(
self.hass, self.ws_address
)
except CannotConnect:
return await self.async_revert_addon_config(reason="cannot_connect")
if self.config_entry.unique_id != self.version_info.home_id:
return await self.async_revert_addon_config(reason="different_device")
self._async_update_entry(
{
**self.config_entry.data,
CONF_URL: self.ws_address,
CONF_USB_PATH: self.usb_path,
CONF_NETWORK_KEY: self.network_key,
CONF_USE_ADDON: True,
CONF_INTEGRATION_CREATED_ADDON: self.integration_created_addon,
}
)
# Always reload entry since we may have disconnected the client.
self.hass.async_create_task(
self.hass.config_entries.async_reload(self.config_entry.entry_id)
)
return self.async_create_entry(title=TITLE, data={})
async def async_revert_addon_config(self, reason: str) -> FlowResult:
"""Abort the options flow.
If the add-on options have been changed, revert those and restart add-on.
"""
# If reverting the add-on options failed, abort immediately.
if self.revert_reason:
_LOGGER.error(
"Failed to revert add-on options before aborting flow, reason: %s",
reason,
)
if self.revert_reason or not self.original_addon_config:
self.hass.async_create_task(
self.hass.config_entries.async_reload(self.config_entry.entry_id)
)
return self.async_abort(reason=reason)
self.revert_reason = reason
addon_config_input = {
ADDON_USER_INPUT_MAP[addon_key]: addon_val
for addon_key, addon_val in self.original_addon_config.items()
}
_LOGGER.debug("Reverting add-on options, reason: %s", reason)
return await self.async_step_configure_addon(addon_config_input)
class CannotConnect(exceptions.HomeAssistantError):
"""Indicate connection error."""
class InvalidInput(exceptions.HomeAssistantError):
"""Error to indicate input data is invalid."""
def __init__(self, error: str) -> None:
"""Initialize error."""
super().__init__()
self.error = error
| 37.447667
| 88
| 0.63096
|
5bf61ac30ffe7734c8cd119953ff1f8a2324405d
| 21
|
py
|
Python
|
example_snippets/multimenus_snippets/Snippets/SciPy/Physical and mathematical constants/Units/Volume/One fluid ounce (US) in cubic meters.py
|
kuanpern/jupyterlab-snippets-multimenus
|
477f51cfdbad7409eab45abe53cf774cd70f380c
|
[
"BSD-3-Clause"
] | null | null | null |
example_snippets/multimenus_snippets/Snippets/SciPy/Physical and mathematical constants/Units/Volume/One fluid ounce (US) in cubic meters.py
|
kuanpern/jupyterlab-snippets-multimenus
|
477f51cfdbad7409eab45abe53cf774cd70f380c
|
[
"BSD-3-Clause"
] | null | null | null |
example_snippets/multimenus_snippets/Snippets/SciPy/Physical and mathematical constants/Units/Volume/One fluid ounce (US) in cubic meters.py
|
kuanpern/jupyterlab-snippets-multimenus
|
477f51cfdbad7409eab45abe53cf774cd70f380c
|
[
"BSD-3-Clause"
] | 1
|
2021-02-04T04:51:48.000Z
|
2021-02-04T04:51:48.000Z
|
constants.fluid_ounce
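# Hypothetical context (not part of the snippet): assumes `from scipy import constants`
# has been executed; the expression above evaluates to one US fluid ounce in cubic
# meters (approximately 2.957e-05).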
| 21
| 21
| 0.952381
|
33ab89acdf761f5254a978ba6feb7370c20865cb
| 1,271
|
py
|
Python
|
Easy/429.py
|
Hellofafar/Leetcode
|
7a459e9742958e63be8886874904e5ab2489411a
|
[
"CNRI-Python"
] | 6
|
2017-09-25T18:05:50.000Z
|
2019-03-27T00:23:15.000Z
|
Easy/429.py
|
Hellofafar/Leetcode
|
7a459e9742958e63be8886874904e5ab2489411a
|
[
"CNRI-Python"
] | 1
|
2017-10-29T12:04:41.000Z
|
2018-08-16T18:00:37.000Z
|
Easy/429.py
|
Hellofafar/Leetcode
|
7a459e9742958e63be8886874904e5ab2489411a
|
[
"CNRI-Python"
] | null | null | null |
# ------------------------------
# 429. N-ary Tree Level Order Traversal
#
# Description:
# Given an n-ary tree, return the level order traversal of its nodes' values. (ie, from left to right, level by level).
# For example, given a 3-ary tree, we should return its level order traversal as:
# [
# [1],
# [3,2,4],
# [5,6]
# ]
#
# Note:
# The depth of the tree is at most 1000.
# The total number of nodes is at most 5000.
#
# Version: 1.0
# 07/14/18 by Jianfa
# ------------------------------
"""
# Definition for a Node.
class Node(object):
def __init__(self, val, children):
self.val = val
self.children = children
"""
class Solution(object):
def levelOrder(self, root):
"""
:type root: Node
:rtype: List[List[int]]
"""
if not root:
return []
visited = [root]
res = []
while visited:
level = []
for i in range(len(visited)):
curr = visited.pop(0)
level.append(curr.val)
if curr.children:
visited += curr.children
res.append(level)
return res
# Used for testing
if __name__ == "__main__":
test = Solution()
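    # Hypothetical smoke test (not in the original file): define a minimal Node class
    # matching the commented-out definition above, build the example 3-ary tree from
    # the problem statement, and check its level order traversal.
    class Node(object):
        def __init__(self, val, children):
            self.val = val
            self.children = children
    root = Node(1, [Node(3, [Node(5, []), Node(6, [])]), Node(2, []), Node(4, [])])
    assert test.levelOrder(root) == [[1], [3, 2, 4], [5, 6]]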
# ------------------------------
# Summary:
# BFS.
| 22.298246
| 119
| 0.483871
|
b8ab1ae75a55771cb48c813c01490912466d5fb1
| 347
|
py
|
Python
|
OpenGLCffi/EGL/EXT/NV/native_query.py
|
cydenix/OpenGLCffi
|
c78f51ae5e6b655eb2ea98f072771cf69e2197f3
|
[
"MIT"
] | null | null | null |
OpenGLCffi/EGL/EXT/NV/native_query.py
|
cydenix/OpenGLCffi
|
c78f51ae5e6b655eb2ea98f072771cf69e2197f3
|
[
"MIT"
] | null | null | null |
OpenGLCffi/EGL/EXT/NV/native_query.py
|
cydenix/OpenGLCffi
|
c78f51ae5e6b655eb2ea98f072771cf69e2197f3
|
[
"MIT"
] | null | null | null |
from OpenGLCffi.EGL import params
@params(api='egl', prms=['dpy', 'display_id'])
def eglQueryNativeDisplayNV(dpy, display_id):
pass
@params(api='egl', prms=['dpy', 'surf', 'window'])
def eglQueryNativeWindowNV(dpy, surf, window):
pass
@params(api='egl', prms=['dpy', 'surf', 'pixmap'])
def eglQueryNativePixmapNV(dpy, surf, pixmap):
pass
| 20.411765
| 50
| 0.700288
|
0a670a2e75f50aa4d41f996f2a0bc52056552c53
| 1,000
|
py
|
Python
|
python-cim/samples/dump_class_layout.py
|
pombredanne/flare-wmi
|
fdde184b7bf98e1043f8246e0c75a7a1316c48c9
|
[
"Apache-2.0"
] | 390
|
2015-08-06T20:33:01.000Z
|
2021-09-11T21:43:34.000Z
|
python-cim/samples/dump_class_layout.py
|
pombredanne/flare-wmi
|
fdde184b7bf98e1043f8246e0c75a7a1316c48c9
|
[
"Apache-2.0"
] | 15
|
2015-08-11T18:02:10.000Z
|
2019-08-11T10:16:49.000Z
|
python-cim/samples/dump_class_layout.py
|
pombredanne/flare-wmi
|
fdde184b7bf98e1043f8246e0c75a7a1316c48c9
|
[
"Apache-2.0"
] | 106
|
2015-08-08T22:44:50.000Z
|
2021-08-25T09:20:57.000Z
|
import logging
import traceback
from cim import CIM
from cim import Index
from cim.objects import ObjectResolver
from cim.formatters import dump_layout
def main(type_, path, namespaceName, className):
if type_ not in ("xp", "win7"):
raise RuntimeError("Invalid mapping type: {:s}".format(type_))
c = CIM(type_, path)
i = Index(c.cim_type, c.logical_index_store)
o = ObjectResolver(c, i)
print("%s" % "=" * 80)
print("namespace: %s" % namespaceName)
try:
cd = o.get_cd(namespaceName, className)
except IndexError:
print("ERROR: failed to find requested class definition")
return
print(cd.tree())
cl = o.get_cl(namespaceName, className)
print(cl.derivation)
try:
print(dump_layout(cd, cl))
    except Exception:
print("ERROR: failed to dump class definition!")
print(traceback.format_exc())
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
import sys
main(*sys.argv[1:])
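# Hypothetical invocation (repository path and class name are placeholders, not from
# the original repository):
#
#     python dump_class_layout.py win7 /path/to/WBEM/Repository root\cimv2 Win32_Service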
| 25.641026
| 70
| 0.658
|