hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2ddb711319d2de41b35803baad0302688374879f | 1,600 | py | Python | plots/plottingcompare.py | HPQC-LABS/Quantum-Graph-Spectra | b897d94dd03c48ffec5735b3dc5b86f8c3ab5a8f | [
"MIT"
] | 1 | 2020-07-29T06:42:32.000Z | 2020-07-29T06:42:32.000Z | plots/plottingcompare.py | HPQC-LABS/Quantum-Graph-Spectra | b897d94dd03c48ffec5735b3dc5b86f8c3ab5a8f | [
"MIT"
] | null | null | null | plots/plottingcompare.py | HPQC-LABS/Quantum-Graph-Spectra | b897d94dd03c48ffec5735b3dc5b86f8c3ab5a8f | [
"MIT"
] | 2 | 2021-03-29T13:40:47.000Z | 2021-03-29T13:41:00.000Z | '''
@author: Josh Payne
Description: For creating multiple overlaid charts
'''
from mpl_toolkits.axes_grid1 import host_subplot
import mpl_toolkits.axisartist as AA
import matplotlib.pyplot as plt
import numpy as np
### Get values from performance.py, input here ###
x = [4, 5, 8, 9, 16, 32, 64]
lst = [(2.585895228385925, 4.9364027687737175e-08), (13.683554983139038, 0.035110609181115214), (16.250244092941283, 0.17863540370194406), (63.24338710308075, 0.2964172356868715), (81.63710451126099, 0.4288713187054065), (457.574907541275, 1.5497983761583067), (2580.9958889484406, 3.939294261935955)]
# otherlist = [0.0023797988891601563, 1.6597139596939088, 1.7730239033699036, 2.4004372358322144, 2.2994803905487062, 1.8459036707878114, 1.3680771589279175]
times = [i[0] for i in lst]
accuracies = [i[1] for i in lst]
host = host_subplot(111, axes_class=AA.Axes)
plt.subplots_adjust(right=0.75)
par1 = host.twinx()
par2 = host.twinx()
offset = 0
new_fixed_axis = par2.get_grid_helper().new_fixed_axis
par2.axis["right"] = new_fixed_axis(loc="right",
axes=par2,
offset=(offset, 0))
par2.axis["right"].toggle(all=True)
host.set_xlabel("Number of Vertices")
host.set_ylabel("Mean Runtime (s)")
par1.set_ylabel("Mean Error")
p1, = host.plot(x, times, label="Mean Runtime (s)")
p2, = par1.plot(x, accuracies, label="Mean Error")
par1.set_ylim(-0.4, 3.99)
par2.set_ylim(0.4, 3.939)
host.legend()
par1.axis["left"].label.set_color(p1.get_color())
par2.axis["right"].label.set_color(p2.get_color())
plt.draw()
plt.show() | 30.769231 | 302 | 0.71 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 400 | 0.25 |
2ddc43ce9995edf0340e08d59a52ccaf4be2ad42 | 804 | py | Python | pulse2percept/datasets/__init__.py | pulse2percept/pulse2percept | 67e0f2354db5ebe306b617f7f78a9ea8c02327ac | [
"BSD-3-Clause"
] | 40 | 2019-11-01T14:09:34.000Z | 2022-02-28T19:08:01.000Z | pulse2percept/datasets/__init__.py | jgranley/pulse2percept | 65c11393a33d1531cd02a3e38243414bf8172e9a | [
"BSD-3-Clause"
] | 277 | 2019-11-22T03:30:31.000Z | 2022-03-28T00:11:03.000Z | pulse2percept/datasets/__init__.py | jgranley/pulse2percept | 65c11393a33d1531cd02a3e38243414bf8172e9a | [
"BSD-3-Clause"
] | 31 | 2020-01-22T06:36:36.000Z | 2022-01-20T09:54:25.000Z | """Utilities to download and import datasets.
* **Dataset loaders** can be used to load small datasets that come
pre-packaged with the pulse2percept software.
* **Dataset fetchers** can be used to download larger datasets from a given
URL and directly import them into pulse2percept.
.. autosummary::
:toctree: _api
base
horsager2009
beyeler2019
nanduri2012
.. seealso::
* :ref:`Basic Concepts > Datasets <topics-datasets>`
"""
from .base import clear_data_dir, get_data_dir, fetch_url
from .beyeler2019 import fetch_beyeler2019
from .horsager2009 import load_horsager2009
from .nanduri2012 import load_nanduri2012
__all__ = [
'clear_data_dir',
'fetch_url',
'fetch_beyeler2019',
'get_data_dir',
'load_horsager2009',
'load_nanduri2012',
]
| 22.333333 | 76 | 0.726368 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 563 | 0.700249 |
2ddca5cc12de3a408cda0ff45249cbd6d8e54333 | 7,452 | py | Python | pysnmp-with-texts/TIMETRA-SAS-IEEE8021-PAE-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/TIMETRA-SAS-IEEE8021-PAE-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/TIMETRA-SAS-IEEE8021-PAE-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module TIMETRA-SAS-IEEE8021-PAE-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/TIMETRA-SAS-IEEE8021-PAE-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:21:45 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsIntersection")
dot1xAuthConfigEntry, = mibBuilder.importSymbols("IEEE8021-PAE-MIB", "dot1xAuthConfigEntry")
ModuleCompliance, NotificationGroup, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup")
Counter32, NotificationType, Gauge32, MibIdentifier, ModuleIdentity, Bits, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, ObjectIdentity, iso, Integer32, Counter64, IpAddress, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "NotificationType", "Gauge32", "MibIdentifier", "ModuleIdentity", "Bits", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ObjectIdentity", "iso", "Integer32", "Counter64", "IpAddress", "Unsigned32")
DisplayString, TruthValue, TextualConvention, RowStatus = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TruthValue", "TextualConvention", "RowStatus")
timetraSASNotifyPrefix, timetraSASConfs, timetraSASModules, timetraSASObjs = mibBuilder.importSymbols("TIMETRA-SAS-GLOBAL-MIB", "timetraSASNotifyPrefix", "timetraSASConfs", "timetraSASModules", "timetraSASObjs")
TNamedItem, TPolicyStatementNameOrEmpty, ServiceAdminStatus = mibBuilder.importSymbols("TIMETRA-TC-MIB", "TNamedItem", "TPolicyStatementNameOrEmpty", "ServiceAdminStatus")
timeraSASIEEE8021PaeMIBModule = ModuleIdentity((1, 3, 6, 1, 4, 1, 6527, 6, 2, 1, 1, 17))
timeraSASIEEE8021PaeMIBModule.setRevisions(('1912-07-01 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: timeraSASIEEE8021PaeMIBModule.setRevisionsDescriptions(('Rev 1.0 03 Aug 2012 00:00 1.0 release of the ALCATEL-SAS-IEEE8021-PAE-MIB.',))
if mibBuilder.loadTexts: timeraSASIEEE8021PaeMIBModule.setLastUpdated('1207010000Z')
if mibBuilder.loadTexts: timeraSASIEEE8021PaeMIBModule.setOrganization('Alcatel-Lucent')
if mibBuilder.loadTexts: timeraSASIEEE8021PaeMIBModule.setContactInfo('Alcatel-Lucent SROS Support Web: http://support.alcatel-lucent.com ')
if mibBuilder.loadTexts: timeraSASIEEE8021PaeMIBModule.setDescription("This document is the SNMP MIB module to manage and provision the 7x50 extensions to the IEEE8021-PAE-MIB (Port Access Entity nodule for managing IEEE 802.X) feature for the Alcatel 7210 device. Copyright 2004-2012 Alcatel-Lucent. All rights reserved. Reproduction of this document is authorized on the condition that the foregoing copyright notice is included. This SNMP MIB module (Specification) embodies Alcatel-Lucent's proprietary intellectual property. Alcatel-Lucent retains all title and ownership in the Specification, including any revisions. Alcatel-Lucent grants all interested parties a non-exclusive license to use and distribute an unmodified copy of this Specification in connection with management of Alcatel-Lucent products, and without fee, provided this copyright notice and license appear on all copies. This Specification is supplied 'as is', and Alcatel-Lucent makes no warranty, either express or implied, as to the use, operation, condition, or performance of the Specification.")
tmnxSASDot1xObjs = MibIdentifier((1, 3, 6, 1, 4, 1, 6527, 6, 2, 2, 2, 16))
tmnxSASDot1xAuthenticatorObjs = MibIdentifier((1, 3, 6, 1, 4, 1, 6527, 6, 2, 2, 2, 16, 1))
tmnxSASDot1xConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 6527, 6, 2, 2, 1, 12))
tmnxDot1xSASCompliancs = MibIdentifier((1, 3, 6, 1, 4, 1, 6527, 6, 2, 2, 1, 12, 1))
tmnxDot1xSASGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 6527, 6, 2, 2, 1, 12, 2))
dot1xAuthConfigExtnTable = MibTable((1, 3, 6, 1, 4, 1, 6527, 6, 2, 2, 2, 16, 1, 1), )
if mibBuilder.loadTexts: dot1xAuthConfigExtnTable.setStatus('current')
if mibBuilder.loadTexts: dot1xAuthConfigExtnTable.setDescription('The table dot1xAuthConfigExtnTable allows configuration of RADIUS authentication parameters for the 802.1X PAE feature on port level.')
dot1xAuthConfigExtnEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6527, 6, 2, 2, 2, 16, 1, 1, 1), )
dot1xAuthConfigEntry.registerAugmentions(("TIMETRA-SAS-IEEE8021-PAE-MIB", "dot1xAuthConfigExtnEntry"))
dot1xAuthConfigExtnEntry.setIndexNames(*dot1xAuthConfigEntry.getIndexNames())
if mibBuilder.loadTexts: dot1xAuthConfigExtnEntry.setStatus('current')
if mibBuilder.loadTexts: dot1xAuthConfigExtnEntry.setDescription('dot1xAuthConfigExtnEntry is an entry (conceptual row) in the dot1xAuthConfigExtnTable. Each entry represents the configuration for Radius Authentication on a port. Entries have a presumed StorageType of nonVolatile.')
dot1xPortEtherTunnel = MibTableColumn((1, 3, 6, 1, 4, 1, 6527, 6, 2, 2, 2, 16, 1, 1, 1, 150), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dot1xPortEtherTunnel.setStatus('current')
if mibBuilder.loadTexts: dot1xPortEtherTunnel.setDescription('The value of tmnxPortEtherDot1xTunnel specifies whether the DOT1X packet tunneling for the ethernet port is enabled or disabled. When tunneling is enabled, the port will not process any DOT1X packets but will tunnel them through instead.')
dot1xAuthConfigExtnGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6527, 6, 2, 2, 1, 12, 2, 1)).setObjects(("TIMETRA-SAS-IEEE8021-PAE-MIB", "dot1xPortEtherTunnel"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
dot1xAuthConfigExtnGroup = dot1xAuthConfigExtnGroup.setStatus('current')
if mibBuilder.loadTexts: dot1xAuthConfigExtnGroup.setDescription('The group of objects supporting management of Radius authentication for the IEEE801.1X PAE feature on Alcatel 7210 SR series systems.')
dot1xAuthConfigExtnCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 6527, 6, 2, 2, 1, 12, 1, 1)).setObjects(("TIMETRA-SAS-IEEE8021-PAE-MIB", "dot1xAuthConfigExtnGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
dot1xAuthConfigExtnCompliance = dot1xAuthConfigExtnCompliance.setStatus('current')
if mibBuilder.loadTexts: dot1xAuthConfigExtnCompliance.setDescription('The compliance statement for management of Radius authentication for the IEEE801.1X PAE feature on Alcatel 7210 SR series systems.')
mibBuilder.exportSymbols("TIMETRA-SAS-IEEE8021-PAE-MIB", dot1xAuthConfigExtnGroup=dot1xAuthConfigExtnGroup, dot1xAuthConfigExtnEntry=dot1xAuthConfigExtnEntry, tmnxSASDot1xObjs=tmnxSASDot1xObjs, timeraSASIEEE8021PaeMIBModule=timeraSASIEEE8021PaeMIBModule, tmnxDot1xSASGroups=tmnxDot1xSASGroups, PYSNMP_MODULE_ID=timeraSASIEEE8021PaeMIBModule, dot1xAuthConfigExtnCompliance=dot1xAuthConfigExtnCompliance, tmnxDot1xSASCompliancs=tmnxDot1xSASCompliancs, dot1xPortEtherTunnel=dot1xPortEtherTunnel, tmnxSASDot1xConformance=tmnxSASDot1xConformance, dot1xAuthConfigExtnTable=dot1xAuthConfigExtnTable, tmnxSASDot1xAuthenticatorObjs=tmnxSASDot1xAuthenticatorObjs)
| 143.307692 | 1,076 | 0.800322 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,463 | 0.464707 |
2ddf7737c3560fadbc544f9a488716622f8551b0 | 7,093 | py | Python | pipeline/mk_all_level1_fsf_bbr.py | lbconner/openfMRI | 265d8ef013dad14fd1741d5817d00f9411d85103 | [
"BSD-2-Clause"
] | 33 | 2015-02-17T17:21:43.000Z | 2021-08-23T08:27:10.000Z | pipeline/mk_all_level1_fsf_bbr.py | lbconner/openfMRI | 265d8ef013dad14fd1741d5817d00f9411d85103 | [
"BSD-2-Clause"
] | 13 | 2015-01-14T15:17:09.000Z | 2017-07-10T02:17:06.000Z | pipeline/mk_all_level1_fsf_bbr.py | lbconner/openfMRI | 265d8ef013dad14fd1741d5817d00f9411d85103 | [
"BSD-2-Clause"
] | 24 | 2015-01-27T10:02:47.000Z | 2021-03-19T20:05:35.000Z | #!/usr/bin/env python
""" mk_all_level1_fsf.py - make fsf files for all subjects
USAGE: python mk_all_level1_fsf_bbr.py <name of dataset> <modelnum> <basedir - default is staged> <nonlinear - default=1> <smoothing - default=0> <tasknum - default to all>
"""
## Copyright 2011, Russell Poldrack. All rights reserved.
## Redistribution and use in source and binary forms, with or without modification, are
## permitted provided that the following conditions are met:
## 1. Redistributions of source code must retain the above copyright notice, this list of
## conditions and the following disclaimer.
## 2. Redistributions in binary form must reproduce the above copyright notice, this list
## of conditions and the following disclaimer in the documentation and/or other materials
## provided with the distribution.
## THIS SOFTWARE IS PROVIDED BY RUSSELL POLDRACK ``AS IS'' AND ANY EXPRESS OR IMPLIED
## WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
## FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL RUSSELL POLDRACK OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
## CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
## SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
## ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
## NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import glob
from mk_level1_fsf_bbr import *
from mk_level1_fsf import *
import launch_qsub
import argparse
import sys
def usage():
    """Emit the module usage text on stdout and terminate with exit status 2."""
    text = __doc__
    sys.stdout.write(text)
    sys.exit(2)
def parse_command_line():
    """Parse command-line options for level-1 FSF generation.

    Returns:
        argparse.Namespace with all scenario options. --taskid is required;
        every other option has a default. Boolean flags follow the
        "--noX disables X" convention (confound, hpf, whiten, launch).
    """
    parser = argparse.ArgumentParser(description='setup_subject')
    parser.add_argument('--taskid', dest='taskid',
                        required=True, help='Task ID')
    parser.add_argument('--parenv', dest='parenv',
                        default='2way', help='Parallel environment')
    parser.add_argument('--anatimg', dest='anatimg',
                        default='', help='Specified anatomy image')
    parser.add_argument('--tasknum', dest='tasknum', type=int,
                        help='Task number')
    parser.add_argument('--basedir', dest='basedir',
                        default=os.getcwd(), help='Base directory (above taskid directory)')
    parser.add_argument('--smoothing', dest='smoothing', type=int,
                        default=0, help='Smoothing (mm FWHM)')
    parser.add_argument('--noconfound', dest='confound', action='store_false',
                        default=True, help='Omit motion/confound modeling')
    parser.add_argument('--use_inplane', dest='use_inplane', type=int,
                        default=0, help='Use inplane image')
    parser.add_argument('--nonlinear', dest='nonlinear', action='store_true',
                        default=False, help='Use nonlinear registration')
    parser.add_argument('--nobbr', dest='nobbr', action='store_true',
                        default=False, help='Use standard reg instead of BBR')
    parser.add_argument('--nohpf', dest='hpf', action='store_false',
                        default=True, help='Turn off high pass filtering')
    parser.add_argument('--nowhiten', dest='whiten', action='store_false',
                        default=True, help='Turn off prewhitening')
    parser.add_argument('--test', dest='test', action='store_true',
                        default=False, help='Test mode (do not run job)')
    parser.add_argument('--nolaunch', dest='launch', action='store_false',
                        default=True, help='Do not launch job')
    parser.add_argument('--modelnum', dest='modelnum', type=int,
                        default=1, help='Model number')
    parser.add_argument('--ncores', dest='ncores', type=int,
                        default=0, help='number of cores (ncores * way = 12)')
    return parser.parse_args()
def main():
    """Build level-1 FEAT .fsf files for every BOLD run under <basedir>/<taskid>,
    write a shell script of `feat` commands, and (unless --test/--nolaunch)
    submit the batch via launch_qsub.

    NOTE: this file uses Python 2 print statements throughout.
    """
    args=parse_command_line()
    print args
    smoothing=args.smoothing
    use_inplane=args.use_inplane
    basedir=os.path.abspath(args.basedir)
    nonlinear=args.nonlinear
    modelnum=args.modelnum
    if not args.confound:
        print 'omitting confound modeling'
    dataset=args.taskid
    # Script of feat commands accumulated below; only opened in real runs.
    if not args.test:
        outfile=open('mk_all_level1_%s.sh'%dataset,'w')
    # Restrict the glob to a single task when --tasknum was given.
    tasknum_spec='task*'
    if not args.tasknum==None:
        tasknum_spec='task%03d*'%args.tasknum
    dsdir=os.path.join(basedir,dataset)
    bolddirs=glob.glob(os.path.join(dsdir,'sub*/BOLD/%s'%tasknum_spec))
    print bolddirs
    for root in bolddirs:
        #print 'ROOT:',root
        # Only runs that already have a motion-corrected, skull-stripped
        # BOLD image are processed.
        for m in glob.glob(os.path.join(root,'bold_mcf_brain.nii.gz')):
            #print 'BOLDFILE:',m
            # Parse subject/task/run identity out of the absolute path
            # components.  NOTE(review): the fixed indices [6], [7], [9]
            # assume a specific directory depth below / — confirm against
            # the deployment layout.
            f_split=root.split('/')
            #print f_split
            scankey='/'+'/'.join(f_split[1:7])+'/scan_key.txt'
            taskid=f_split[6]
            subnum=int(f_split[7].lstrip('sub'))
            taskinfo=f_split[9].split('_')
            tasknum=int(taskinfo[0].lstrip('task'))
            runnum=int(taskinfo[1].lstrip('run'))
            #tr=float(load_scankey(scankey)['TR'])
            # check for inplane
            inplane='/'+'/'.join(f_split[1:8])+'/anatomy/inplane001_brain.nii.gz'
##            if args.nobbr:
##                print 'mk_level1_fsf("%s",%d,%d,%d,%d,%d,"%s",%d)'%(taskid,subnum,tasknum,runnum,smoothing,use_inplane,basedir,modelnum)
##            else:
##                print 'mk_level1_fsf_bbr("%s",%d,%d,%d,%d,%d,"%s",%d)'%(taskid,subnum,tasknum,runnum,smoothing,use_inplane,basedir,modelnum)
            # Generate the .fsf (standard or BBR registration) and queue
            # its feat invocation in the batch script.
            if not args.test:
                if args.nobbr:
                    fname=mk_level1_fsf(taskid,subnum,tasknum,runnum,smoothing,use_inplane,basedir,nonlinear,modelnum)
                else:
                    fname=mk_level1_fsf_bbr(taskid,subnum,tasknum,runnum,smoothing,use_inplane,basedir,nonlinear,modelnum,args.anatimg,args.confound,args.hpf,args.whiten)
                #print 'CMD: mk_level1_fsf_bbr(taskid,subnum,tasknum,runnum,smoothing,use_inplane,basedir,nonlinear,modelnum,args.anatimg,args.confound)'
                outfile.write('feat %s\n'%fname)
    if not args.test:
        outfile.close()
    if not args.test:
        print 'now launching all feats:'
        print "find %s/sub*/model/*.fsf |sed 's/^/feat /' > run_all_feats.sh; sh run_all_feats.sh"%args.taskid
        # Re-read the script we just wrote to size the cluster request.
        f=open('mk_all_level1_%s.sh'%dataset)
        l=f.readlines()
        f.close()
        njobs=len(l)
        if args.parenv=='':
            args.parenv='6way'
        # "Nway" parallel environment: N slots per node, 12 cores per node.
        way=float(args.parenv.replace('way',''))
        if args.ncores==0:
            ncores=(njobs/way)*12.0
        else:
            ncores=args.ncores
        if args.launch:
            launch_qsub.launch_qsub(script_name='mk_all_level1_%s.sh'%dataset,runtime='04:00:00',jobname='%sl1'%dataset,email=False,parenv=args.parenv,ncores=ncores)
# Script entry point: build (and optionally launch) all level-1 FSF files.
if __name__ == '__main__':
    main()
| 41.238372 | 172 | 0.661638 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,597 | 0.50712 |
2de03c9b6f61130c7a0de0bbdb46eed6409dcbac | 2,492 | py | Python | gettweets.py | ketankokane94/twitter-analysis | 7820fe7d3350af5066799c7540352993b1ecc36c | [
"MIT"
] | null | null | null | gettweets.py | ketankokane94/twitter-analysis | 7820fe7d3350af5066799c7540352993b1ecc36c | [
"MIT"
] | null | null | null | gettweets.py | ketankokane94/twitter-analysis | 7820fe7d3350af5066799c7540352993b1ecc36c | [
"MIT"
] | null | null | null | import tweepy
import csv
class dealWithTwitter:
    """Download a Twitter user's recent tweets via tweepy and dump them to CSV.

    Credentials are read from a local ``pwd.txt`` (one token per line:
    access key, access secret, consumer key, consumer secret).
    """

    def __init__(self):
        # NOTE(review): loadTokens() actually populates access_key /
        # access_secret, not the access_token / access_token_secret fields
        # initialised here; the originals are kept for backward compatibility.
        self.access_token = ""
        self.access_token_secret = ""
        self.consumer_key = ""
        self.consumer_secret = ""
        self.api = ""

    def loadTokens(self):
        """Read the four OAuth tokens (one per line) from pwd.txt into self."""
        tokens = []
        with open('pwd.txt') as pwd_file:
            for line in pwd_file:
                tokens.append(line.strip())
                print(line.strip())
        self.access_key = tokens[0]
        self.access_secret = tokens[1]
        self.consumer_key = tokens[2]
        self.consumer_secret = tokens[3]

    def get_all_tweets(self, screen_name):
        """Fetch the user's recent tweets and write <screen_name>_tweets.csv.

        Twitter only allows access to a user's most recent ~3240 tweets with
        this (user_timeline) method; results are paged 200 at a time using
        max_id to avoid duplicates.
        """
        self.loadTokens()
        auth = tweepy.OAuthHandler(self.consumer_key, self.consumer_secret)
        auth.set_access_token(self.access_key, self.access_secret)
        self.api = tweepy.API(auth)
        # Accumulate every page of tweepy Tweet objects here.
        alltweets = []
        new_tweets = self.api.user_timeline(screen_name=screen_name, count=200)
        alltweets.extend(new_tweets)
        # id of the oldest tweet seen, minus one, so the next page excludes it.
        oldest = alltweets[-1].id - 1
        while len(new_tweets) > 0:
            print("getting tweets before %s" % (oldest))
            new_tweets = self.api.user_timeline(screen_name=screen_name, count=200, max_id=oldest)
            alltweets.extend(new_tweets)
            oldest = alltweets[-1].id - 1
            print("...%s tweets downloaded so far" % (len(alltweets)))
        # Flatten into rows for the CSV; keep text as str (csv handles encoding).
        outtweets = [[tweet.id_str, tweet.created_at, tweet.text] for tweet in alltweets]
        # BUG FIX: the original opened the file in binary mode ('wb') and fed
        # bytes to csv.writer, which raises TypeError on Python 3, and the
        # blanket `except: pass` then silently discarded the error so no CSV
        # was ever produced.  Open in text mode with newline='' as the csv
        # module requires, and only swallow genuine I/O errors.
        try:
            with open('%s_tweets.csv' % screen_name, 'w', newline='', encoding='utf-8') as f:
                writer = csv.writer(f)
                writer.writerow(["id", "created_at", "text"])
                writer.writerows(outtweets)
        except OSError as err:
            print("failed to write CSV: %s" % err)
# Entry point: download all available tweets for the hard-coded account.
if __name__ == '__main__':
    t = dealWithTwitter()
    t.loadTokens()  # NOTE(review): get_all_tweets() calls loadTokens() again, so this call is redundant
    t.get_all_tweets('realDonaldTrump')
    # t.connectToTwitter()
| 38.338462 | 105 | 0.611156 | 2,323 | 0.932183 | 0 | 0 | 0 | 0 | 0 | 0 | 674 | 0.270465 |
2de11305e60f62768eeb9919ec2876747df54327 | 22,175 | py | Python | main.py | Swaraj-Deep/UAV-GRN-DRN | f18fe05751794bc7530b58312f6fadacb1e4a500 | [
"MIT"
] | 1 | 2020-06-16T11:27:30.000Z | 2020-06-16T11:27:30.000Z | main.py | Swaraj-Deep/UAV-GRN-DRN | f18fe05751794bc7530b58312f6fadacb1e4a500 | [
"MIT"
] | null | null | null | main.py | Swaraj-Deep/UAV-GRN-DRN | f18fe05751794bc7530b58312f6fadacb1e4a500 | [
"MIT"
] | null | null | null | import json
import random
import numpy as np
import os
import time
import os.path
import networkx as nx
import users_endpoint.users
import grn_endpoint.grn_info
import move_endpoint.movement
import reward_endpoint.rewards
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
# Global variables declaration
# User coverage threshold
coverage_threshold = 0
# GRN Edge Similarity threshold
similarity_threshold = 0
# Start time of program
start_time = 0
# End time of program
end_time = 0
# Threshold for UAVs i.e each UAV must be placed at least this distance away
t = 0
# Number of rows and columns in the grid
N = 0
M = 0
# Exploration and exploitation rate of the agent
epsilon = 0
# Learning rate of the agent
learning_rate = 0
# Decay factor for exploration rate
decay_factor = 0
# Number of UAVs
number_UAV = 0
# Variable to hold the UAV to UAV Threshold
UAV_to_UAV_threshold = 0
# Radius of the UAV
radius_UAV = 0
# Power of the UAV
power_UAV = 0
# Maximum iteration for the algorithm
max_iter = 0
# Dictionary to hold the location of jth UAV
# Key in this dictionary is the UAV_node
# Value is the location in which it is placed
UAV_location = {}
# List to contain already connected ground users
ground_placed = []
# Cell Size of grid
cell_size = 0
# Unit multiplier
unit_mul = 0
def init():
    """
    Function: init
    Functionality: Sets all the global variables.

    Creates the output_files/ and graph_output_files/ directories, reads
    input_files/scenario_input.json into the module-level parameters, scales
    all distance-like values by the unit multiplier (t is then converted to
    grid cells), and initialises the users and GRN endpoint modules.
    """
    global start_time
    global similarity_threshold
    global coverage_threshold
    start_time = time.time()
    global N
    global M
    global t
    global epsilon
    global learning_rate
    global decay_factor
    global max_iter
    global number_UAV
    global radius_UAV
    global UAV_to_UAV_threshold
    global power_UAV
    global cell_size, unit_mul
    parent_dir = os.getcwd()
    # Create the output folders once, best-effort: any OSError (already
    # exists, permissions, ...) is ignored, matching the original
    # try/os.mkdir/except-pass pairs.
    for folder in ('output_files', 'graph_output_files'):
        try:
            os.makedirs(os.path.join(parent_dir, folder), exist_ok=True)
        except OSError:
            pass
    file_path = os.path.join(parent_dir, 'input_files', 'scenario_input.json')
    with open(file_path, 'r') as file_pointer:
        file_data = json.load(file_pointer)
    N = file_data['N']
    M = file_data['M']
    t = file_data['t']
    epsilon = file_data['epsilon']
    learning_rate = file_data['learning_rate']
    decay_factor = file_data['decay_factor']
    max_iter = file_data['max_iter']
    number_UAV = file_data['number_UAV']
    radius_UAV = file_data['radius_UAV']
    UAV_to_UAV_threshold = file_data['UAV_to_UAV_threshold']
    power_UAV = file_data['power_UAV']
    coverage_threshold = file_data['coverage_threshold']
    similarity_threshold = file_data['similarity_threshold']
    cell_size = file_data['cell_size']
    unit_mul = file_data['unit_multiplier']
    # Bring every distance to a common unit, then express the UAV-to-UAV
    # separation threshold t in whole grid cells.
    UAV_to_UAV_threshold *= unit_mul
    radius_UAV *= unit_mul
    cell_size *= unit_mul
    t *= unit_mul
    t //= cell_size
    users_endpoint.users.init()
    grn_endpoint.grn_info.init_custom()
def q_learn(UAV_node, placed, flag):
    """
    Function: q_learn\n
    Parameters: UAV_node -> UAV_node which is to be placed, placed -> list of already placed UAV_nodes, flag -> True if only user coverage is considered\n
    Return: the optimal position (x, y) in the grid where the UAV_node needs to be placed\n

    Runs an epsilon-greedy Q-learning loop over the N*M grid (15 actions:
    5 moves x 3 power levels) and returns the cell whose Q-row holds the
    largest value.
    """
    global N
    global M
    global epsilon
    global learning_rate
    global decay_factor
    global max_iter
    global power_UAV
    global UAV_location
    global radius_UAV
    global t
    # One Q-row per grid cell; 15 actions = 5 movements x 3 power levels.
    Q = np.zeros((N * M, 15))
    # Alternative starting-location strategies, kept for experimentation:
    # Centroid Location
    # loc = move_endpoint.movement.get_centroid_location(
    # N, M, UAV_location, int(UAV_to_UAV_threshold // cell_size))
    # Center Location
    # loc = move_endpoint.movement.get_center_location(N, M)
    # Random Location
    loc = move_endpoint.movement.get_random_location(N, M)
    # Vicinity Location
    # loc = move_endpoint.movement.get_vicinity_location(
    # N, M, UAV_location, int(UAV_to_UAV_threshold // cell_size))
    epsilon_val = epsilon
    # low, medium, high power: offset added to the move action to index the
    # corresponding power band of the 15-wide action axis.
    action_power = [0, 5, 10]
    for iterations in range(max_iter):
        x, y, action, power_factor = move_endpoint.movement.get_random_move(
            loc, N, M)
        loc = (x, y)
        action += action_power[power_factor]
        # NOTE(review): mutates the global UAV power budget every iteration;
        # it is never restored within this function — confirm intended.
        power_UAV += power_factor
        if random.uniform(0, 1) <= epsilon_val:
            # Exploration branch: Q is overwritten with the raw reward
            # (not the usual temporal-difference update).
            index = move_endpoint.movement.map_2d_to_1d(loc, N)
            if flag:
                Q[index, action] = reward_endpoint.rewards.reward_function_user(
                    UAV_node, placed, loc, UAV_location, t, power_UAV, int(UAV_to_UAV_threshold // cell_size), int(radius_UAV // cell_size), N, M, set(ground_placed))
            else:
                Q[index, action] = reward_endpoint.rewards.reward_function(
                    UAV_node, placed, loc, UAV_location, t, power_UAV, int(UAV_to_UAV_threshold // cell_size), int(radius_UAV // cell_size), N, M, set(ground_placed))
        else:
            # Exploitation branch: standard Q-learning update with the
            # greedy bootstrap term max(Q[index, :]).
            index = move_endpoint.movement.map_2d_to_1d(loc, N)
            if flag:
                reward = reward_endpoint.rewards.reward_function_user(
                    UAV_node, placed, loc, UAV_location, t, power_UAV, int(UAV_to_UAV_threshold // cell_size), int(radius_UAV // cell_size), N, M, set(ground_placed))
            else:
                reward = reward_endpoint.rewards.reward_function(
                    UAV_node, placed, loc, UAV_location, t, power_UAV, int(UAV_to_UAV_threshold // cell_size), int(radius_UAV // cell_size), N, M, set(ground_placed))
            Q[index, action] = Q[index, action] + learning_rate * \
                (reward + decay_factor *
                 np.max(Q[index, :]) - Q[index, action])
        # Decay exploration over time so later iterations mostly exploit.
        epsilon_val *= decay_factor
    # Pick the grid cell whose best action value is highest.
    max_reward = -1
    max_pos = -1
    for index, rows in enumerate(Q):
        expected_max = np.max(rows)
        if expected_max >= max_reward:
            max_reward = expected_max
            max_pos = index
    x, y = move_endpoint.movement.map_1d_to_2d(max_pos, N, M)
    return (x, y)
def done_simulation(ground_placed, placed):
    """
    Function: done_simulation\n
    Parameters: ground_placed -> list of users alredy placed, placed -> list of UAVs placed\n
    Returns: True if simulation is done\n

    The simulation is finished when (a) the fraction of distinct covered
    users reaches coverage_threshold, (b) the UAV graph is connected, and
    (c) the GRN edge-similarity ratio reaches similarity_threshold.
    """
    global coverage_threshold, similarity_threshold
    total_users = users_endpoint.users.get_number_ground_users()
    # (a) distinct-user coverage criterion
    covered = len(set(ground_placed)) / total_users >= coverage_threshold
    uav_graph = get_UAV_graph(placed)
    common_lst, _, grn_edge_lst, _ = similarity_criteria(uav_graph)
    # Guard against division by zero when the GRN subgraph has no edges.
    denom = len(grn_edge_lst) or 1
    # (c) GRN edge-similarity criterion
    similar = len(common_lst) / denom >= similarity_threshold
    # (b) single connected component among the placed UAVs
    connected = nx.number_connected_components(uav_graph) == 1
    return covered and connected and similar
def valid_loc(loc):
    """
    Function: valid_loc\n
    Parameter: loc -> location of the UAV being placed\n
    Return: true if that location is not occupied\n
    """
    global UAV_location
    # A cell is free iff no already-placed UAV occupies it; a membership
    # test over the values replaces the original manual items() loop.
    return loc not in UAV_location.values()
def bruteforce(UAV_node, placed, flag):
    """
    Function: bruteforce\n
    Parameters: UAV_node -> UAV_node which is to be placed, placed -> list of already placed UAV_nodes, flag -> True if only user coverage is considered\n
    Functionality: bruteforce all the grid location\n
    Return: the (x, y) cell with the highest reward among unoccupied cells,
    or (-1, -1) if every cell is occupied or beats -999999 nowhere.
    """
    global N
    global M
    global radius_UAV
    global UAV_location
    global t
    global power_UAV
    global ground_placed
    # Sentinel lower bound so any real reward on a free cell wins.
    max_reward = -999999
    max_pos = (-1, -1)
    # Exhaustively score every grid cell with the appropriate reward
    # function (user-coverage-only vs full reward).
    for i in range(N):
        for j in range(M):
            loc = (i, j)
            if flag:
                reward = reward_endpoint.rewards.reward_function_user(
                    UAV_node, placed, loc, UAV_location, t, power_UAV, int(UAV_to_UAV_threshold // cell_size), int(radius_UAV // cell_size), N, M, set(ground_placed))
            else:
                reward = reward_endpoint.rewards.reward_function(
                    UAV_node, placed, loc, UAV_location, t, power_UAV, int(UAV_to_UAV_threshold // cell_size), int(radius_UAV // cell_size), N, M, set(ground_placed))
            # Only unoccupied cells are eligible (strict > keeps the
            # earliest best cell in scan order).
            if reward > max_reward and valid_loc(loc):
                max_reward = reward
                max_pos = loc
    return max_pos
def consider_user_coverage():
    """
    Function: consider_user_coverage\n
    Parameters: None\n
    Functionality: consider only user_coverage of the network\n
    Return: the list of UAV nodes actually placed.

    Places UAV 1 at the densest user cell, then greedily places further
    UAVs (via bruteforce with flag=True, i.e. user-coverage reward only)
    until done_simulation() or the coverage threshold is reached.
    """
    global ground_placed
    ground_users = users_endpoint.users.get_number_ground_users()
    placed = [1]
    unplaced = []
    # First UAV goes to the cell with the highest ground-user density.
    max_pos, max_density = users_endpoint.users.get_max_pos_density()
    UAV_location[1] = max_pos
    print(f'Placed UAV {1}')
    user_list = users_endpoint.users.get_users_cell_connections(max_pos)
    for user in user_list:
        ground_placed.append(user)
    for UAV_node in range(2, number_UAV + 1):
        unplaced.append(UAV_node)
    for UAV_node in unplaced:
        # Stop early once the overall simulation criteria are satisfied.
        if done_simulation(ground_placed, placed):
            break
        # Stop once the distinct-user coverage fraction alone suffices.
        if len(set(ground_placed)) / ground_users >= coverage_threshold:
            break
        loc = bruteforce(UAV_node, placed, True)
        UAV_location[UAV_node] = loc
        placed.append(UAV_node)
        print(f'Placed UAV {UAV_node}')
        # Record every user reachable from the new UAV's cell (duplicates
        # are tolerated; callers take set(ground_placed)).
        user_list = users_endpoint.users.get_users_cell_connections(loc)
        for user in user_list:
            ground_placed.append(user)
    return placed
def reallocate(placed):
"""
Function: reallocate\n
Parameters: placed -> list of UAVs which are placed\n
Functionality: Checks if reallocating the UAVs meets the criteria or not
"""
global UAV_location, ground_placed
for UAV_node in placed:
if done_simulation(ground_placed, placed):
break
print(f'Trying to redeploy UAV {UAV_node}')
prev_loc = UAV_location[UAV_node]
prev_user_list = users_endpoint.users.get_users_cell_connections(
prev_loc)
UAV_G = get_UAV_graph(placed)
common_lst, _, grn_edge_lst, _ = similarity_criteria(
UAV_G)
total_edge_grn_SG = len(grn_edge_lst)
if total_edge_grn_SG == 0:
total_edge_grn_SG = 1
prev_edge_similarity = len(common_lst) / total_edge_grn_SG
prev_len_ground = len(set(ground_placed))
for user in prev_user_list:
if user in ground_placed:
ground_placed.remove(user)
loc = bruteforce(UAV_node, placed, False)
UAV_location[UAV_node] = loc
user_list = users_endpoint.users.get_users_cell_connections(loc)
for user in user_list:
ground_placed.append(user)
len_ground = len(set(ground_placed))
UAV_G = get_UAV_graph(placed)
common_lst, _, grn_edge_lst, _ = similarity_criteria(
UAV_G)
total_edge_grn_SG = len(grn_edge_lst)
if total_edge_grn_SG == 0:
total_edge_grn_SG = 1
edge_similarity = len(common_lst) / total_edge_grn_SG
print(f'Total User covered when UAV {UAV_node} was placed at {prev_loc} was {prev_len_ground}\nTotal User covered when UAV {UAV_node} is placed at {loc} is {len_ground}\nPrevious Edge Similarity was {round(prev_edge_similarity * 100, 2)}\nCurrent edge similarity is {round(edge_similarity * 100, 2)}')
global coverage_threshold
ground_users = users_endpoint.users.get_number_ground_users()
if edge_similarity >= prev_edge_similarity and (len_ground >= prev_len_ground or len_ground / ground_users >= coverage_threshold):
print(f'ReDeployed UAV {UAV_node}')
else:
UAV_location[UAV_node] = prev_loc
for user in prev_user_list:
ground_placed.append(user)
for user in user_list:
if user in ground_placed:
ground_placed.remove(user)
print(f'ReDeployment of UAV {UAV_node} failed')
def simulation(placed):
"""
Function: simulation\n
Parameters: placed -> list of placed UAVs\n
Functionality: Simulates the network\n
"""
global ground_placed
reallocate(placed)
if done_simulation(ground_placed, placed):
write_output(placed)
return
unplaced = []
for UAV_node in range(placed[-1] + 1, number_UAV + 1):
unplaced.append(UAV_node)
for UAV_node in unplaced:
loc = bruteforce(UAV_node, placed, False)
UAV_location[UAV_node] = bruteforce(UAV_node, placed, False)
placed.append(UAV_node)
print(f'Placed UAV {UAV_node}')
user_list = users_endpoint.users.get_users_cell_connections(loc)
for user in user_list:
ground_placed.append(user)
reallocate(placed)
if done_simulation(ground_placed, placed):
break
write_output(placed)
def get_UAV_graph(placed):
"""
Function: get_UAV_graph\n
Parameters: placed -> list of already placed ground users\n:
Returns: UAV graph at a particular point of time\n
"""
global UAV_to_UAV_threshold, cell_size, UAV_location
UAV_G = nx.Graph()
for node in placed:
UAV_G.add_node(node)
for node1 in placed:
for node2 in placed:
if move_endpoint.movement.get_euc_dist(UAV_location[node1], UAV_location[node2]) <= int(UAV_to_UAV_threshold // cell_size) and node1 != node2:
UAV_G.add_edge(node1, node2)
return UAV_G
def similarity_criteria(UAV_G):
"""
Function:similarity_criteria\n
Parameter: UAV_G -> Current UAV graph\n
Returns: A tuple of common edges, uncommon edges and edges which are in grn graph. Dictionary of reverse mapping is also returned\n
"""
grn_node_lst = [grn_endpoint.grn_info.m(node) for node in UAV_G.nodes]
reverse_mapping = {}
for node in UAV_G.nodes:
if grn_endpoint.grn_info.m(node) not in reverse_mapping:
reverse_mapping[grn_endpoint.grn_info.m(node)] = node
uncommon_lst = []
common_lst = []
grn_graph = grn_endpoint.grn_info.get_grn_graph()
grn_SG = grn_graph.subgraph(grn_node_lst)
grn_edge_lst = []
for edge in grn_SG.edges:
u, v = edge
if (u, v) not in grn_edge_lst and (v, u) not in grn_edge_lst:
grn_edge_lst.append((u, v))
for edge in grn_edge_lst:
u, v = edge
if (reverse_mapping[u], reverse_mapping[v]) in UAV_G.edges or (reverse_mapping[v], reverse_mapping[u]) in UAV_G.edges:
if (reverse_mapping[u], reverse_mapping[v]) not in common_lst and (reverse_mapping[v], reverse_mapping[u]) not in common_lst:
common_lst.append((reverse_mapping[u], reverse_mapping[v]))
else:
if (reverse_mapping[u], reverse_mapping[v]) not in uncommon_lst and (reverse_mapping[v], reverse_mapping[u]) not in uncommon_lst:
uncommon_lst.append((reverse_mapping[u], reverse_mapping[v]))
return (common_lst, uncommon_lst, grn_edge_lst, reverse_mapping)
def write_output(placed):
"""
Function: write_output
Parameters: placed -> list of already placed UAVs
Functionality: write the output to the respective files
"""
global radius_UAV, cell_size, UAV_to_UAV_threshold
main_file_name = os.getcwd()
parent_dir = os.path.join(main_file_name, 'output_files')
curr_dir = str(epsilon) + "_" + str(learning_rate) + \
"_" + str(decay_factor)
dir_path = os.path.join(parent_dir, curr_dir)
try:
os.mkdir(dir_path)
except OSError as error:
pass
file_num = len([name for name in os.listdir(
dir_path)])
os.chdir(dir_path)
if file_num == 0:
file_num = 1
text_file_name = 'Output_text' + str(file_num) + '.txt'
image_path = os.path.join(dir_path, 'images')
try:
os.mkdir(image_path)
except OSError as error:
pass
graph_file_name = 'Output_graph' + str(file_num) + '.pdf'
text_file_data = []
text_file_data.append(
f'Total Number of users served: {len(set(ground_placed))}\nList of users: {sorted(set(ground_placed))}\n')
text_file_data.append(f'Total number of UAV used: {len(UAV_location)}\n')
for UAV_node, loc in UAV_location.items():
text_file_data.append(
f'UAV: {UAV_node} can serve users: {users_endpoint.users.get_users_cell_connections(loc)} when placed at {loc}\n')
UAV_G = get_UAV_graph(placed)
total_edge = len(UAV_G.edges)
common_lst, uncommon_lst, grn_edge_lst, reverse_mapping = similarity_criteria(
UAV_G)
total_edge_grn_SG = len(grn_edge_lst)
if total_edge_grn_SG == 0:
total_edge_grn_SG = 1
if total_edge == 0:
total_edge = 1
if len(common_lst) > 0:
text_file_data.append(
f'Following are the edges which is present in both UAV and GRN netwrok: ({len(common_lst)})\n')
for edge in common_lst:
text_file_data.append(f'{edge}, ')
text_file_data.append(f'\n')
else:
text_file_data.append(f'No edge is common in UAV and GRN graph.\n')
if len(uncommon_lst) > 0:
text_file_data.append(
f'Following are the edges which is present in GRN but not in UAV network: ({len(uncommon_lst)})\n')
for edge in uncommon_lst:
text_file_data.append(f'{edge}, ')
text_file_data.append(f'\n')
else:
text_file_data.append(
f'There is no edge which is in GRN but not in the UAV graph\n')
text_file_data.append(
f'Total Number of edges (in UAV Topology): {total_edge}\nPercentage of edge which is both in GRN and UAV: {(len(common_lst) / total_edge_grn_SG) * 100}\n')
text_file_data.append(
f'Following are the edges (in GRN Subgraph): {[(reverse_mapping[u], reverse_mapping[v]) for (u, v) in grn_edge_lst]}\n')
text_file_data.append(
f'Total Number of edges (in GRN Subgraph): {total_edge_grn_SG}\n')
node_motif = grn_endpoint.grn_info.get_motif_dict(UAV_G)
for node, motif in node_motif.items():
text_file_data.append(f'Motif of UAV {node} is {motif}\n')
e_motif = {}
PI = 0
for edge in UAV_G.edges:
node1, node2 = edge
e_motif[edge] = min(node_motif[node1], node_motif[node2])
text_file_data.append(
f'Edge {edge} has edge motif centrality of {e_motif[edge]}\n')
PI = max(PI, e_motif[edge])
text_file_data.append(f'Maximum Edge motif centrality is {PI}\n')
UAV_topology = plt.figure(1)
nx.draw(UAV_G, with_labels=True)
global end_time
text_file_data.append(
f'Standard Deviation of distances between users: {users_endpoint.users.get_standard_deviation()}\n')
end_time = time.time()
text_file_data.append(
f'Total time to run the simulation: {end_time - start_time} seconds')
with open(text_file_name, 'w') as file_pointer:
file_pointer.writelines(text_file_data)
plt.close()
g_x, g_y = get_user_location(main_file_name)
UAV_guser_plot = plt.figure(2)
plt.scatter(g_x, g_y, color='gray')
UAV_x = []
UAV_y = []
rad = int(radius_UAV // cell_size) + 1
for node, loc in UAV_location.items():
a, b = loc
UAV_x.append(a)
UAV_y.append(b)
c = plt.Circle((a, b), rad, color='green', fill=False)
ax = plt.gca()
ax.add_artist(c)
plt.scatter(UAV_x, UAV_y, color='blue')
for idx in range(len(UAV_x)):
plt.annotate(f'{idx + 1}', (UAV_x[idx], UAV_y[idx]), color='black')
for edge in UAV_G.edges:
edge_x = []
edge_y = []
a, b = edge
loc_a = UAV_location[a]
loc_b = UAV_location[b]
x1, y1 = loc_a
x2, y2 = loc_b
edge_x = [x1, x2]
edge_y = [y1, y2]
plt.plot(edge_x, edge_y, color='blue')
for edge in common_lst:
edge_x = []
edge_y = []
a, b = edge
loc_a = UAV_location[a]
loc_b = UAV_location[b]
x1, y1 = loc_a
x2, y2 = loc_b
edge_x = [x1, x2]
edge_y = [y1, y2]
plt.plot(edge_x, edge_y, color='red')
plt.title('Overall Scenario Visualization', fontweight="bold")
plt.xlabel('N', fontweight='bold')
plt.ylabel('M', fontweight='bold')
pp = PdfPages(os.path.join(image_path, graph_file_name))
pp.savefig(UAV_topology, dpi=500, transparent=True)
pp.savefig(UAV_guser_plot, dpi=500, transparent=True)
pp.close()
graph_output_dir = os.path.join(main_file_name, 'graph_output_files')
file_num = len([name for name in os.listdir(graph_output_dir)])
file_name = os.path.join(graph_output_dir, f'output_main{file_num // 2}.json')
with open(file_name, 'w') as file_pointer:
json.dump(UAV_location, file_pointer)
def get_user_location(parent_dir):
"""
Function: get_user_location\n
Parameter: parent_dir -> path of current dir\n
Returns: Returns list of x and y coordinates of ground users\n
"""
dir_name = 'input_files'
file_name = 'user_input.json'
user_input = {}
with open(os.path.join(parent_dir, dir_name, file_name), 'r') as file_pointer:
user_input = json.load(file_pointer)
pos = user_input['Position of Ground users']
x = []
y = []
for item in pos:
a, b = map(float, item.split(' '))
x.append(a)
y.append(b)
return (x, y)
if __name__ == "__main__":
print(f'Initialiazing the environment')
init()
print(f'Initialiazed environment')
placed = consider_user_coverage()
if done_simulation(ground_placed, placed):
write_output(placed)
else:
simulation(placed)
| 35.48 | 309 | 0.659436 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5,465 | 0.246449 |
2de4906cd9dd45c4ba1ae99f800a8a5ed211db6d | 2,384 | py | Python | tests/work_with_gdscript/test_native_call.py | curradium/godot-python | 11355dfd03a3dad80253b49c532fbb61a8b26d98 | [
"CC-BY-3.0"
] | null | null | null | tests/work_with_gdscript/test_native_call.py | curradium/godot-python | 11355dfd03a3dad80253b49c532fbb61a8b26d98 | [
"CC-BY-3.0"
] | null | null | null | tests/work_with_gdscript/test_native_call.py | curradium/godot-python | 11355dfd03a3dad80253b49c532fbb61a8b26d98 | [
"CC-BY-3.0"
] | null | null | null | # TODO:
# - allow inheritance from GDScript class
# - overload native method ?
import pytest
from godot.bindings import ResourceLoader, GDScript, PluginScript
def test_native_method(node):
original_name = node.get_name()
try:
node.set_name("foo")
name = node.get_name()
assert name == "foo"
finally:
node.set_name(original_name)
@pytest.mark.xfail
def test_overloaded_native_method(node, subnode):
expected = """
*
***
*****
|
"""
ret = node.print_tree()
assert ret == expected
ret = subnode.print_tree()
assert ret == expected
def test_node_ready_called(node):
assert node.is_ready_called()
def test_subnode_ready_called(subnode):
assert subnode.is_ready_called()
assert subnode.is_sub_ready_called()
def test_method_call(anynode):
ret = anynode.meth("foo")
assert ret == "foo"
def test_overloaded_method_call(subnode):
ret = subnode.overloaded_by_child_meth("foo")
assert ret == "sub:foo"
def test_property_without_default_value(anynode):
value = anynode.prop
assert value is None
def test_property(anynode):
anynode.prop = 42
value = anynode.prop
assert value == 42
@pytest.mark.xfail(reason="default value seems to be only set in .tscn")
def test_overloaded_property_default_value(pynode, pysubnode):
# Parent property
value = pynode.overloaded_by_child_prop
assert value == "default"
# Overloaded property
value = pysubnode.overloaded_by_child_prop
assert value == "sub:default"
def test_overloaded_property(pynode, pysubnode):
# Not supported by GDScript
# Parent property
pynode.overloaded_by_child_prop = "foo"
value = pynode.overloaded_by_child_prop
assert value == "foo"
# Overloaded property
pysubnode.overloaded_by_child_prop = "foo"
value = pysubnode.overloaded_by_child_prop
assert value == "sub:foo"
def test_static_method_call(node):
value = node.static_meth("foo")
assert value == "static:foo"
@pytest.mark.parametrize(
"path,expected_type",
[("res://gdnode.gd", GDScript), ("res://pynode.py", PluginScript)],
)
def test_load_script(path, expected_type):
script = ResourceLoader.load(path, "", False)
try:
assert isinstance(script, expected_type)
assert script.can_instance()
finally:
script.free()
| 23.145631 | 72 | 0.693372 | 0 | 0 | 0 | 0 | 909 | 0.381292 | 0 | 0 | 415 | 0.174077 |
2de4ee5bc7351ca57d213412fccfba40eb972d36 | 1,880 | py | Python | tests/python/unittest/test_arith_stmt_simplify.py | ndl/tvm | 6e4c6d7a3a840ae1f7f996c856357068ba7c68ee | [
"Apache-2.0"
] | 15 | 2019-05-02T00:06:28.000Z | 2022-03-25T03:11:14.000Z | tests/python/unittest/test_arith_stmt_simplify.py | ndl/tvm | 6e4c6d7a3a840ae1f7f996c856357068ba7c68ee | [
"Apache-2.0"
] | 5 | 2019-05-13T20:44:51.000Z | 2019-09-25T19:56:29.000Z | tests/python/unittest/test_arith_stmt_simplify.py | ndl/tvm | 6e4c6d7a3a840ae1f7f996c856357068ba7c68ee | [
"Apache-2.0"
] | 5 | 2019-03-06T19:54:18.000Z | 2022-02-01T14:27:58.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tvm
def test_stmt_simplify():
ib = tvm.ir_builder.create()
A = ib.pointer("float32", name="A")
C = ib.pointer("float32", name="C")
n = tvm.var("n")
with ib.for_range(0, n, name="i") as i:
with ib.if_scope(i < 12):
A[i] = C[i]
body = tvm.stmt.LetStmt(n, 10, ib.get())
body = tvm.ir_pass.CanonicalSimplify(body)
assert isinstance(body.body, tvm.stmt.Store)
def test_thread_extent_simplify():
ib = tvm.ir_builder.create()
A = ib.pointer("float32", name="A")
C = ib.pointer("float32", name="C")
n = tvm.var("n")
tx = tvm.thread_axis("threadIdx.x")
ty = tvm.thread_axis("threadIdx.y")
ib.scope_attr(tx, "thread_extent", n)
ib.scope_attr(tx, "thread_extent", n)
ib.scope_attr(ty, "thread_extent", 1)
with ib.if_scope(tx + ty < 12):
A[tx] = C[tx + ty]
body = tvm.stmt.LetStmt(n, 10, ib.get())
body = tvm.ir_pass.CanonicalSimplify(body)
assert isinstance(body.body.body.body, tvm.stmt.Store)
if __name__ == "__main__":
test_stmt_simplify()
test_thread_extent_simplify()
| 35.471698 | 62 | 0.685106 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 907 | 0.482447 |
2de6561887dd2c5980bf1f012aa5a1b51a0aef2b | 3,350 | py | Python | worker/worker.py | cshenton/neuroevolution | 12f0d52513b2d9a91c96f9a4d9b4aafab1d95fe8 | [
"MIT"
] | 45 | 2018-04-18T21:37:12.000Z | 2020-09-28T22:35:08.000Z | worker/worker.py | cshenton/neuroevolution | 12f0d52513b2d9a91c96f9a4d9b4aafab1d95fe8 | [
"MIT"
] | null | null | null | worker/worker.py | cshenton/neuroevolution | 12f0d52513b2d9a91c96f9a4d9b4aafab1d95fe8 | [
"MIT"
] | 10 | 2018-04-19T10:13:03.000Z | 2022-02-02T14:06:59.000Z | """The Worker class, which manages running policy evaluations."""
import datetime
import grpc
import gym
import os
from google.protobuf import empty_pb2
from proto.neuroevolution_pb2 import Evaluation, Individual
from proto.neuroevolution_pb2_grpc import NeuroStub
from worker.policy import Policy
ENVIRONMENT = os.getenv("ENVIRONMENT", "Venture-v4")
HOST = os.getenv("HOST_ADDRESS", "127.0.0.1") + ":" + os.getenv("HOST_PORT", "8080")
MUTATION_STRENGTH = float(os.getenv("MUTATION_STRENGTH", "0.005"))
class Worker:
"""Worker manages the evaluation of candidate policies from the master server.
Attributes:
client (NeuroStub): The client stub to the master server.
env (gym.Env): The gym environment being evaluated.
policy (Policy): The policy network, with changeable weights.
strength (float): The genetic mutation strength.
"""
def __init__(self, env_name=ENVIRONMENT, strength=MUTATION_STRENGTH, host=HOST):
"""Creates a Worker instance.
Args:
env (string): The valid gym environment name.
host (string): The hostname of the master server.
strength (float): The genetic mutation strength.
"""
self.client = NeuroStub(grpc.insecure_channel(host))
self.env = gym.make(env_name)
self.policy = Policy(self.env.action_space.n)
self.strength = strength
print("Host:", host)
print("Environment:", env_name)
print("Mutation Strength:", strength)
def seek(self):
"""Gets a new set of seeds to try from the master server.
Returns:
seeds (list of ints): The seed sequence defining the next policy
to try out.
"""
return self.client.Seek(empty_pb2.Empty(), timeout=30).seeds
def show(self, seeds, score):
"""Sends the seeds and corresponding score to the master server.
Args:
seeds (list of ints): The seed sequence defining a policy.
score (float): The score it achieved on the environment.
"""
self.client.Show(
Evaluation(
individual=Individual(
seeds=seeds,
),
score=score,
),
timeout=30,
)
def run_one(self):
"""Gets, evaluates, and reports a policy."""
t = datetime.datetime.now()
seeds = self.seek()
self.policy.set_weights(seeds, self.strength)
setup_time = datetime.datetime.now() - t
t = datetime.datetime.now()
i = 0
score = 0
done = False
state = self.env.reset()
while not done:
action = self.policy.act(state)
state, reward, done, _ = self.env.step(action)
score += reward
i += 1
if i >= 20000:
break
self.show(seeds, score)
run_time = datetime.datetime.now() - t
print(
"Score: ", score,
"Seeds: ", seeds,
"Frames: ", i,
"Setup Time: ", setup_time,
"Run Time: ", run_time,
"FPS during run: ", i / run_time.total_seconds()
)
def run(self):
"""Repeatedly gets, evaluates, and reports a policy."""
while True:
self.run_one()
| 32.211538 | 84 | 0.584478 | 2,841 | 0.84806 | 0 | 0 | 0 | 0 | 0 | 0 | 1,389 | 0.414627 |
2de6c51e3b98221f5e9ca7c29a7762f77132e57d | 836 | py | Python | CAP4-Case study_interface design/docstring.py | falble/mythinkpython2 | 25de15c8657f32a8f85189d9cb0588c816e9e7d3 | [
"Apache-2.0"
] | 1 | 2020-11-20T16:28:32.000Z | 2020-11-20T16:28:32.000Z | CAP4-Case study_interface design/docstring.py | falble/mythinkpython2 | 25de15c8657f32a8f85189d9cb0588c816e9e7d3 | [
"Apache-2.0"
] | null | null | null | CAP4-Case study_interface design/docstring.py | falble/mythinkpython2 | 25de15c8657f32a8f85189d9cb0588c816e9e7d3 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Sat Feb 16 17:05:36 2019
@author: Utente
"""
#Chapter 4
#docstring
import turtle
import math
bob = turtle.Turtle()
print(bob)
def polyline(t, n, lenght, angle):
#documentation string--->
"""Draws n line segments with the given lenght
and angle (in degrees) between them.
t is a turtle.
"""
#the aim is to simplify the code and to improve interface design
for i in range(n):
t.fd(lenght)
t.lt(angle)
polyline(bob, 4, 120, 90) #preconditions of the functions (should be satisfied by the caller)
turtle.mainloop() #in this case the square is the postcondition (should be satisfied by the function)
#intended effect (the segments)
#any side effects (like moving the turtle)
| 25.333333 | 102 | 0.62201 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 549 | 0.656699 |
2de89c57ba2b6a38a0def3e77a44733886f67fe3 | 378 | py | Python | Day02/part2.py | JavierRizzoA/AoC2021 | 948177b135f2570090cd3a13aafaa0199566248a | [
"Beerware"
] | null | null | null | Day02/part2.py | JavierRizzoA/AoC2021 | 948177b135f2570090cd3a13aafaa0199566248a | [
"Beerware"
] | null | null | null | Day02/part2.py | JavierRizzoA/AoC2021 | 948177b135f2570090cd3a13aafaa0199566248a | [
"Beerware"
] | null | null | null | x = 0
y = 0
aim = 0
with open('input') as f:
for line in f:
direction = line.split()[0]
magnitude = int(line.split()[1])
if direction == 'forward':
x += magnitude
y += aim * magnitude
elif direction == 'down':
aim += magnitude
elif direction == 'up':
aim -= magnitude
print(str(x * y))
| 23.625 | 40 | 0.481481 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 26 | 0.068783 |
2de8dc7fc9d8a2753db8ec50c0b95c8f4645d196 | 2,329 | py | Python | ddd_domain_driven_design/application/dto/generalisation/dtometa.py | pidevops/py-domain-driven-design | 86f5743e5a37628a2146e62bebd4489a93ad8f16 | [
"MIT"
] | 2 | 2019-11-14T15:10:00.000Z | 2020-05-12T07:33:37.000Z | ddd_domain_driven_design/application/dto/generalisation/dtometa.py | pidevops/py-domain-driven-design | 86f5743e5a37628a2146e62bebd4489a93ad8f16 | [
"MIT"
] | null | null | null | ddd_domain_driven_design/application/dto/generalisation/dtometa.py | pidevops/py-domain-driven-design | 86f5743e5a37628a2146e62bebd4489a93ad8f16 | [
"MIT"
] | null | null | null | from . import type_checker
from .dtodescriptor import DTODescriptor
class DTOMeta(type):
def __init__(cls, name, bases, namespace, partial: bool = False):
super().__init__(name, bases, namespace)
def __new__(cls, name, bases, class_dict, partial: bool = False):
descriptors = {k: v for k, v in class_dict.items() if isinstance(v, tuple)}
_ = [class_dict.pop(k, None) for k in descriptors]
class_dict['__slots__'] = set(list(descriptors.keys()) + ['_dto_descriptors',
'_initialized_dto_descriptors',
'_dto_descriptors_values',
'_field_validators',
'_partial'])
new_type = type.__new__(cls, name, bases, class_dict)
new_type._dto_descriptors = descriptors
new_type._field_validators = {}
new_type._partial = partial
for attr in new_type._dto_descriptors:
attr_type = new_type._dto_descriptors[attr][0]
descriptor_args = {}
if len(new_type._dto_descriptors[attr]) > 1:
descriptor_args = new_type._dto_descriptors[attr][1]
setattr(new_type, attr, DTODescriptor(dto_class_name=name, field=attr, type_=attr_type, **descriptor_args))
return new_type
def __instancecheck__(self, inst):
if type(inst) == type(self):
return True
if isinstance(inst, dict):
# Comparing a dictionary and a DTO
if not self._partial:
if len(inst.keys()) != len(self._dto_descriptors.keys()):
return False
for k, v in inst.items():
try:
type_checker._check_type(self._dto_descriptors[k][0], v)
except TypeError:
return False
else:
for k in self._dto_descriptors.keys():
try:
type_checker._check_type(self._dto_descriptors[k][0], inst[k])
except (TypeError, KeyError):
return False
return True
return False
| 43.943396 | 119 | 0.522542 | 2,259 | 0.969944 | 0 | 0 | 0 | 0 | 0 | 0 | 147 | 0.063117 |
2de971a92eef57fed46cbae27fb964015248a42c | 10,158 | py | Python | model.py | r-or/cnn-eyetrack | 93a09f209aa8d34defc82c8734d35d5ce56b9060 | [
"MIT"
] | null | null | null | model.py | r-or/cnn-eyetrack | 93a09f209aa8d34defc82c8734d35d5ce56b9060 | [
"MIT"
] | null | null | null | model.py | r-or/cnn-eyetrack | 93a09f209aa8d34defc82c8734d35d5ce56b9060 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
import json
import pprint
import sys
import os
import numpy as np
import traceback
import random
import argparse
import json
import tensorflow
import keras
from keras import optimizers
from keras.models import Sequential
from keras.models import load_model
from keras.layers import Conv2D, MaxPooling2D
from keras.layers import Activation, Dropout, Flatten, Dense
from keras.layers.advanced_activations import LeakyReLU
from keras.preprocessing.image import img_to_array, load_img
from keras.callbacks import ModelCheckpoint, History
from PIL import Image
# start with PYTHONHASHSEED=89
np.random.seed(44)
random.seed(22)
tensorflow.set_random_seed(11)
# session_conf = tensorflow.ConfigProto(intra_op_parallelism_threads=1,
# inter_op_parallelism_threads=1)
# tf_sess = tensorflow.Session(graph=tensorflow.get_default_graph(), config=session_conf)
# keras.backend.set_session(tf_sess)
pp = pprint.PrettyPrinter()
modes = ['train', 'predict', 'validate']
aparser = argparse.ArgumentParser()
aparser.add_argument('-tSet', help='Choose source training set (from augmentation)')
aparser.add_argument('mode', help=str(modes))
aparser.add_argument('name', help='Name of this particular run')
aparser.add_argument('-augRunName', help='Name of the source augmentation')
aparser.add_argument('-ls', help='List all current models', action='store_true')
aparser.add_argument('-useTdata', help='Use training data for prediction/validation instead of validation data', action='store_true')
aparser.add_argument('-pFile', help='prediction mode: name of the image file to predict')
aparser.add_argument('-pathCap', help='Specify path to capture-output', nargs=1)
aparser.add_argument('-pathModel', help='Specify path to models', nargs=1)
aparser.add_argument('-pathAug', help='Specify path to augmentation-output', nargs=1)
aparser.add_argument('-save', help='Save config into cfg.json', action='store_true')
args = aparser.parse_args()
if os.path.exists('cfg.json'):
with open('cfg.json', 'r') as cfgfile:
cfg = json.load(cfgfile)
else:
cfg = {}
if args.pathCap or 'capturepath' not in cfg:
cfg['capturepath'] = args.pathCap
if args.pathModel or 'modelpath' not in cfg:
cfg['modelpath'] = args.pathModel
if args.pathAug or 'augpath' not in cfg:
cfg['augpath'] = args.pathAug
if args.tSet or 'tSet' not in cfg:
cfg['tSet'] = args.tSet
if args.name or 'nameOfRun' not in cfg:
cfg['nameOfRun'] = args.augRunName
if args.save:
with open('cfg.json', 'w') as cfgfile:
cfgfile.write(json.dumps(cfg, sort_keys=True, indent=2))
trainingSet = cfg['tSet']
mode = args.mode
nameofrun = args.name
predfile = args.pFile
srcT = args.useTdata
assert mode in modes
# paths
modelpath = cfg['modelpath']
if args.ls:
print('available runs: ' + str(os.listdir(os.path.join(modelpath, trainingSet))))
sys.exit()
outpath = os.path.join(modelpath, trainingSet, nameofrun)
modelPathBare = os.path.join(outpath, nameofrun)
cpmodelPathBare = os.path.join(outpath, 'chkp')
modelPath = modelPathBare + '.h5'
if not os.path.isdir(outpath):
os.makedirs(outpath)
if not os.path.isdir(cpmodelPathBare):
os.makedirs(cpmodelPathBare)
if len(os.listdir(cpmodelPathBare)):
cpmodelPath = os.path.join(cpmodelPathBare, sorted(os.listdir(cpmodelPathBare))[-1])
assert cpmodelPath.endswith('.h5')
else:
cpmodelPath = None
if not os.path.isfile(modelPath) and not cpmodelPath:
model = Sequential()
model.add(Conv2D(16, (3, 3), input_shape=(720, 1280, 3)))
model.add(LeakyReLU(alpha=.3))
model.add(MaxPooling2D(pool_size=(2, 3)))
model.add(Conv2D(32, (3, 3)))
model.add(LeakyReLU(alpha=.3))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Conv2D(32, (3, 3)))
model.add(LeakyReLU(alpha=.3))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Conv2D(64, (3, 3)))
model.add(LeakyReLU(alpha=.3))
model.add(MaxPooling2D(pool_size=(2, 2)))
# the model so far outputs 3D feature maps (height, width, features)
model.add(Flatten()) # this converts our 3D feature maps to 1D feature vectors
model.add(Dense(64, kernel_initializer='random_uniform'))
model.add(LeakyReLU(alpha=.3))
#model.add(Dropout(0.5))
model.add(Dense(2))
model.add(Activation('sigmoid'))
adaD = optimizers.Adadelta()
model.compile(loss='mse', optimizer=adaD)
startEpoch = 0
else:
# load model
if os.path.isfile(modelPath):
model = load_model(modelPath)
# load training cfg
if os.path.isfile(modelPathBare + '.json'):
with open(modelPathBare + '.json', 'r') as jsonfile:
modelcfg = json.load(jsonfile)
startEpoch = modelcfg['epochsTrained']
else:
startEpoch = 0
else:
model = load_model(cpmodelPath)
startEpoch = int(os.path.basename(cpmodelPath).split('.')[0])
scaleX = 1920 * 2
scaleY = 1080
with open(os.path.join(cfg['capturepath'], trainingSet + '.json')) as jsonfile:
trainingdata = json.load(jsonfile)
dset = {}
for d in trainingdata:
dset[d['f'].split('.')[0]] = (float(d['x']) / scaleX,
float(d['y']) / scaleY)
tset = {}
tfiles = []
vset = {}
vfiles = []
trainDir = os.path.join(cfg['augpath'], cfg['nameOfRun'] + '-train', 'images')
valDir = os.path.join(cfg['augpath'], cfg['nameOfRun'] + '-validate', 'images')
for f in os.listdir(trainDir):
tset[f] = dset[f.split('.')[0].split('_')[0]]
tfiles.append(f)
for f in os.listdir(valDir):
vset[f] = dset[f.split('.')[0]]
vfiles.append(f)
batch_size = min(16, len(tfiles) // 16)
print('{} training samples, {} validation samples'.format(len(tfiles), len(vfiles)))
print(' -> Batch size chosen: {}'.format(batch_size))
class DataGen(keras.utils.Sequence):
def __init__(self, filenames, path, labels, batchSize, dim, nChannels, shuffle=True):
self.dim = dim
self.batchSize = batchSize
self.labels = labels
self.filenames = filenames
self.path = path
self.nChannels = nChannels
self.shuffle = shuffle
self.on_epoch_end()
def __len__(self):
return int(np.floor(len(self.filenames) / self.batchSize))
def __getitem__(self, index):
indexes = self.indexes[index * self.batchSize : (index + 1) * self.batchSize]
fNamesTmp = [self.filenames[k] for k in indexes]
X, y = self.__data_generation(fNamesTmp)
return X, y
def on_epoch_end(self):
self.indexes = np.arange(len(self.filenames))
if self.shuffle:
np.random.shuffle(self.indexes)
def __data_generation(self, fNamesTmp):
X = np.empty((self.batchSize, *self.dim, self.nChannels))
Y = np.empty((self.batchSize, 2))
for idx, fname in enumerate(fNamesTmp):
img = load_img(os.path.join(self.path, fname))
x = img_to_array(img)
x.reshape((720, 1280, 3))
x *= 1.0/256.0
X[idx,] = x
Y[idx,] = np.asarray(self.labels[fname])
return X, Y
if mode == 'train':
training_generator = DataGen(tfiles, trainDir, tset, batch_size, (720, 1280), 3, shuffle=True)
validation_generator = DataGen(vfiles, valDir, vset, batch_size, (720, 1280), 3, shuffle=False)
checkpointer = ModelCheckpoint(filepath=os.path.join(cpmodelPathBare, '{epoch:03d}.h5'), verbose=1, save_best_only=True)
hist = History()
try:
model.fit_generator(training_generator,
steps_per_epoch=len(tfiles) // batch_size,
epochs=50,
validation_data=validation_generator,
validation_steps=len(vfiles) // batch_size,
max_queue_size=4,
workers=4,
initial_epoch=startEpoch,
callbacks=[checkpointer, hist])
except:
print()
traceback.print_exc()
finally:
print('hist: loss - validation loss')
if 'loss' in hist.history:
epochsTrained = len(hist.history['loss'])
for l, vl in zip(hist.history['loss'], hist.history['val_loss']):
print('{:.5f} - {:.5f}'.format(l, vl))
else:
print('N/A')
epochsTrained = 0
# always save your weights after training or during training
model.save(modelPath)
print('Saved model as "{}"'.format(modelPath))
with open(modelPathBare + '.json', 'w') as jsonfile:
jsonfile.write(json.dumps({'epochsTrained': epochsTrained + startEpoch}, sort_keys = True, indent = 2))
elif mode == 'predict':
# print(model.summary())
# pp.pprint(model.get_weights())
X = np.empty((1, 720, 1280, 3))
img = load_img(os.path.join(trainDir if srcT else valDir, sys.argv[4]))
x = img_to_array(img)
x.reshape((720, 1280, 3))
x = x / 256.0
X[0,] = x
output = model.predict(X, None, verbose=1)[0]
print('output: ({:.5f}, {:.5f}) - unscaled: ({:5.2f}, {:5.2f})'.format(output[0], output[1], output[0] * scaleX, output[1] * scaleY))
exp = np.asarray(tset[predfile] if srcT else vset[predfile])
print('expected: ({:.5f}, {:.5f}) - unscaled: ({:5.2f}, {:5.2f})'.format(exp[0], exp[1], exp[0] * scaleX, exp[1] * scaleY))
elif mode == 'validate':
if srcT:
files = tfiles
validation_generator = DataGen(files, trainDir, tset, batch_size, (720, 1280), 3, shuffle=False)
else:
files = vfiles
validation_generator = DataGen(files, valDir, vset, batch_size, (720, 1280), 3, shuffle=False)
predictions = model.predict_generator(validation_generator, verbose=1)
MSE = 0
for f, pred in zip(files, predictions):
exp = np.asarray(tset[f] if srcT else vset[f])
mse = ((exp[0] - pred[0])**2 + (exp[1] - pred[1])**2) / 2
print('{}: ({:.3f}, {:.3f}) -> ({:.3f}, {:.3f}) [mse: {:.3f}]'.format(f, exp[0], exp[1], pred[0], pred[1], mse))
MSE += mse
print('/MSE: {:.3f}'.format(MSE / len(files)))
| 35.767606 | 137 | 0.641366 | 1,316 | 0.129553 | 0 | 0 | 0 | 0 | 0 | 0 | 1,947 | 0.191672 |
2dea338f874c3ce26ae59e17c28ac999a26659a9 | 43,498 | py | Python | ec2driver.py | venumurthy/ec2-driver | 65b0482af3af80e34c00c327f54487d492933b25 | [
"Apache-2.0"
] | null | null | null | ec2driver.py | venumurthy/ec2-driver | 65b0482af3af80e34c00c327f54487d492933b25 | [
"Apache-2.0"
] | null | null | null | ec2driver.py | venumurthy/ec2-driver | 65b0482af3af80e34c00c327f54487d492933b25 | [
"Apache-2.0"
] | null | null | null | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2014 Thoughtworks.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Connection to the Amazon Web Services - EC2 service"""
from threading import Lock
import base64
import time
from boto import ec2
import boto.ec2.cloudwatch
from boto import exception as boto_exc
from boto.exception import EC2ResponseError
from boto.regioninfo import RegionInfo
from oslo.config import cfg
from novaclient.v1_1 import client
from ec2_rule_service import EC2RuleService
from ec2_rule_transformer import EC2RuleTransformer
from ec2driver_config import *
from nova.i18n import _
from nova import block_device
from nova.compute import power_state
from nova.compute import task_states
from nova import db
from nova import exception
from nova.image import glance
from nova.openstack.common import log as logging
from nova.openstack.common import loopingcall
from nova.virt import driver
from nova.virt import virtapi
from credentials import get_nova_creds
from instance_rule_refresher import InstanceRuleRefresher
from openstack_group_service import OpenstackGroupService
from openstack_rule_service import OpenstackRuleService
from openstack_rule_transformer import OpenstackRuleTransformer
import rule_comparator
from group_rule_refresher import GroupRuleRefresher
LOG = logging.getLogger(__name__)
# Configuration options registered under the [ec2driver] group in nova.conf.
# Several options (snapshot_image_format, datastore_regex, vnc_*,
# use_linked_clone) appear to be copied from the vmwareapi driver and are
# never read in this file -- NOTE(review): candidates for removal, confirm
# against the rest of the project before deleting.
ec2driver_opts = [
    cfg.StrOpt('snapshot_image_format',
               help='Snapshot image format (valid options are : '
                    'raw, qcow2, vmdk, vdi). '
                    'Defaults to same as source image'),
    cfg.StrOpt('datastore_regex',
               help='Regex to match the name of a datastore.'),
    cfg.FloatOpt('task_poll_interval',
                 default=0.5,
                 help='The interval used for polling of remote tasks.'),
    cfg.IntOpt('api_retry_count',
               default=10,
               help='The number of times we retry on failures, e.g., '
                    'socket error, etc.'),
    cfg.IntOpt('vnc_port',
               default=5900,
               help='VNC starting port'),
    cfg.IntOpt('vnc_port_total',
               default=10000,
               help='Total number of VNC ports'),
    cfg.BoolOpt('use_linked_clone',
                default=True,
                help='Whether to use linked clone'),
    # AWS credentials used for both the EC2 and CloudWatch connections.
    cfg.StrOpt('ec2_secret_access_key',
               help='The secret access key of the Amazon Web Services account'),
    cfg.StrOpt('ec2_access_key_id',
               help='The access key ID of the Amazon Web Services account'),
]

CONF = cfg.CONF
CONF.register_opts(ec2driver_opts, 'ec2driver')
CONF.import_opt('my_ip', 'nova.netconf')

# Delay between retried AWS API calls (seconds).  NOTE(review): defined but
# not referenced in this file -- confirm usage elsewhere.
TIME_BETWEEN_API_CALL_RETRIES = 1.0

# Map from EC2 instance lifecycle states to Nova power states.  Transitional
# EC2 states ("shutting-down", "stopping") map to NOSTATE because Nova has no
# matching in-between state.
EC2_STATE_MAP = {
    "pending": power_state.BUILDING,
    "running": power_state.RUNNING,
    "shutting-down": power_state.NOSTATE,
    "terminated": power_state.SHUTDOWN,
    "stopping": power_state.NOSTATE,
    "stopped": power_state.SHUTDOWN
}

# boto Instance attributes excluded from get_diagnostics() output because
# they are not serializable / not useful as flat strings.
DIAGNOSTIC_KEYS_TO_FILTER = ['group', 'block_device_mapping']
def set_nodes(nodes):
    """Replace the module-level node list used by EC2Driver.

    Affects get_available_nodes(), get_available_resource() and
    get_host_stats().  The list is stored by reference (not copied);
    call restore_nodes() to revert to the configured host.
    """
    global _EC2_NODES
    _EC2_NODES = nodes
def restore_nodes():
    """Reset the module-level node list to the single configured host.

    Undoes a previous set_nodes() call; typically invoked from test
    tearDown().
    """
    global _EC2_NODES
    _EC2_NODES = [CONF.host]
class EC2Driver(driver.ComputeDriver):
capabilities = {
"has_imagecache": True,
"supports_recreate": True,
}
"""EC2 hypervisor driver. Respurposing for EC2"""
    def __init__(self, virtapi, read_only=False):
        """Set up connections to EC2, CloudWatch and the local Nova API.

        :param virtapi: nova VirtAPI instance passed to the base driver
        :param read_only: accepted for driver-interface compatibility;
                          not used by this driver

        NOTE(review): aws_region, aws_endpoint, host, port, secure,
        flavor_map, volume_map, aws_ami and ec2_subnet_id come from the
        star-import of ec2driver_config -- confirm their values there.
        """
        super(EC2Driver, self).__init__(virtapi)
        # Static capacity advertised for this "host"; EC2 capacity is
        # effectively unbounded, so these are fixed config constants.
        self.host_status_base = {
            'vcpus': VCPUS,
            'memory_mb': MEMORY_IN_MBS,
            'local_gb': DISK_IN_GB,
            'vcpus_used': 0,
            'memory_mb_used': 0,
            'local_gb_used': 100000000000,
            'hypervisor_type': 'EC2',
            'hypervisor_version': '1.0',
            'hypervisor_hostname': CONF.host,
            'cpu_info': {},
            'disk_available_least': 500000000000,
        }
        # In-memory bookkeeping of attached volumes and interfaces,
        # keyed by instance name / vif id.  Lost on service restart.
        self._mounts = {}
        self._interfaces = {}
        # Client for the local OpenStack API (used to look up servers and
        # security groups when syncing them with EC2).
        self.creds = get_nova_creds()
        self.nova = client.Client(**self.creds)
        region = RegionInfo(name=aws_region, endpoint=aws_endpoint)
        self.ec2_conn = ec2.EC2Connection(aws_access_key_id=CONF.ec2driver.ec2_access_key_id,
                                          aws_secret_access_key=CONF.ec2driver.ec2_secret_access_key,
                                          host=host,
                                          port=port,
                                          region=region,
                                          is_secure=secure)
        # CloudWatch connection used only by get_diagnostics().
        self.cloudwatch_conn = ec2.cloudwatch.connect_to_region(
            aws_region, aws_access_key_id=CONF.ec2driver.ec2_access_key_id, aws_secret_access_key=CONF.ec2driver.ec2_secret_access_key)
        # Serializes security-group membership updates in
        # refresh_security_group_rules().
        self.security_group_lock = Lock()
        # Pipeline that mirrors OpenStack security-group rules onto the
        # matching EC2 security groups.
        self.instance_rule_refresher = InstanceRuleRefresher(
            GroupRuleRefresher(
                ec2_connection=self.ec2_conn,
                openstack_rule_service=OpenstackRuleService(
                    group_service=OpenstackGroupService(self.nova.security_groups),
                    openstack_rule_transformer=OpenstackRuleTransformer()
                ),
                ec2_rule_service=EC2RuleService(
                    ec2_connection=self.ec2_conn,
                    ec2_rule_transformer=EC2RuleTransformer(self.ec2_conn)
                )
            )
        )
        # Initialise the node list once; tests may have pre-seeded it via
        # set_nodes().
        if not '_EC2_NODES' in globals():
            set_nodes([CONF.host])
def init_host(self, host):
"""Initialize anything that is necessary for the driver to function,
including catching up with currently running VM's on the given host.
"""
return
def list_instances(self):
"""Return the names of all the instances known to the virtualization
layer, as a list.
"""
all_instances = self.ec2_conn.get_all_instances()
instance_ids = []
for instance in all_instances:
instance_ids.append(instance.id)
return instance_ids
    def plug_vifs(self, instance, network_info):
        """Plug VIFs into networks."""
        # Networking is managed entirely by EC2/VPC; nothing to do locally.
        pass

    def unplug_vifs(self, instance, network_info):
        """Unplug VIFs from networks."""
        # Networking is managed entirely by EC2/VPC; nothing to do locally.
        pass
    def spawn(self, context, instance, image_meta, injected_files,
              admin_password, network_info=None, block_device_info=None):
        """Create a new instance/VM/domain on the virtualization platform.
        Once this successfully completes, the instance should be
        running (power_state.RUNNING).
        If this fails, any partial instance should be completely
        cleaned up, and the virtualization platform should be in the state
        that it was before this call began.
        :param context: security context <Not Yet Implemented>
        :param instance: nova.objects.instance.Instance
                         This function should use the data there to guide
                         the creation of the new instance.
        :param image_meta: image object returned by nova.image.glance that
                           defines the image from which to boot this instance
        :param injected_files: User files to inject into instance.
        :param admin_password: set in instance. <Not Yet Implemented>
        :param network_info:
           :py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
        :param block_device_info: Information about block devices to be
                                  attached to the instance.
        """
        LOG.info("***** Calling SPAWN *******************")
        LOG.info("****** %s" % instance._user_data)
        LOG.info("****** Allocating an elastic IP *********")
        # NOTE(review): the elastic IP is allocated before run_instances();
        # if the launch below raises, this address is leaked -- confirm and
        # consider releasing it on failure.
        elastic_ip_address = self.ec2_conn.allocate_address(domain='vpc')
        #Creating the EC2 instance
        # The OpenStack flavor id is translated to an EC2 instance type via
        # the flavor_map from ec2driver_config.
        flavor_type = flavor_map[instance.get_flavor().id]
        #passing user_data from the openstack instance which is Base64 encoded after decoding it.
        user_data = instance._user_data
        if user_data:
            user_data = base64.b64decode(user_data)
        # aws_ami and ec2_subnet_id come from ec2driver_config: every
        # OpenStack image boots the same fixed AMI into a fixed subnet.
        reservation = self.ec2_conn.run_instances(aws_ami, instance_type=flavor_type, subnet_id=ec2_subnet_id,
                                                  user_data=user_data)
        ec2_instance = reservation.instances
        ec2_id = ec2_instance[0].id
        # Block until EC2 reports the instance running and its status
        # checks pass.
        self._wait_for_state(instance, ec2_id, "running", power_state.RUNNING)
        # Persist the EC2 id and public IP on the OpenStack instance so
        # later calls (destroy, reboot, ...) can find the EC2 counterpart.
        instance['metadata'].update({'ec2_id': ec2_id, 'public_ip_address': elastic_ip_address.public_ip})
        LOG.info("****** Associating the elastic IP to the instance *********")
        self.ec2_conn.associate_address(instance_id=ec2_id, allocation_id=elastic_ip_address.allocation_id)
    def snapshot(self, context, instance, image_id, update_task_state):
        """Snapshot an image of the specified instance
        on EC2 and create an Image which gets stored in AMI (internally in EBS Snapshot)
        :param context: security context
        :param instance: nova.objects.instance.Instance
        :param image_id: Reference to a pre-created image that will hold the snapshot.
        :raises: InstanceNotRunning if the instance has no EC2 counterpart
        """
        LOG.info("***** Calling SNAPSHOT *******************")
        if instance['metadata']['ec2_id'] is None:
            raise exception.InstanceNotRunning(instance_id=instance['uuid'])

        # Adding the below line only alters the state of the instance and not
        # its image in OpenStack.
        update_task_state(
            task_state=task_states.IMAGE_UPLOADING, expected_state=task_states.IMAGE_SNAPSHOT)

        ec2_id = instance['metadata']['ec2_id']
        ec_instance_info = self.ec2_conn.get_only_instances(
            instance_ids=[ec2_id], filters=None, dry_run=False, max_results=None)
        ec2_instance = ec_instance_info[0]
        # NOTE(review): if the instance is not 'running' this silently does
        # nothing and the glance record is never updated -- confirm whether
        # an error should be raised instead.
        if ec2_instance.state == 'running':
            # no_reboot=False: EC2 stops and restarts the instance to get a
            # consistent EBS snapshot.
            ec2_image_id = ec2_instance.create_image(name=str(
                image_id), description="Image from OpenStack", no_reboot=False, dry_run=False)
            LOG.info("Image has been created state to %s." % ec2_image_id)

        # The instance will be in pending state when it comes up, waiting forit to be in available
        self._wait_for_image_state(ec2_image_id, "available")

        # Record the EC2 AMI id in the glance image properties so the two
        # image catalogues stay linked.
        image_api = glance.get_default_image_service()
        image_ref = glance.generate_image_url(image_id)

        metadata = {'is_public': False,
                    'location': image_ref,
                    'properties': {
                        'kernel_id': instance['kernel_id'],
                        'image_state': 'available',
                        'owner_id': instance['project_id'],
                        'ramdisk_id': instance['ramdisk_id'],
                        'ec2_image_id': ec2_image_id }
                    }
        image_api.update(context, image_id, metadata)
def reboot(self, context, instance, network_info, reboot_type,
block_device_info=None, bad_volumes_callback=None):
"""Reboot the specified instance.
After this is called successfully, the instance's state
goes back to power_state.RUNNING. The virtualization
platform should ensure that the reboot action has completed
successfully even in cases in which the underlying domain/vm
is paused or halted/stopped.
:param instance: nova.objects.instance.Instance
:param network_info:
:py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
:param reboot_type: Either a HARD or SOFT reboot
:param block_device_info: Info pertaining to attached volumes
:param bad_volumes_callback: Function to handle any bad volumes
encountered
"""
if reboot_type == 'SOFT':
self._soft_reboot(
context, instance, network_info, block_device_info)
elif reboot_type == 'HARD':
self._hard_reboot(
context, instance, network_info, block_device_info)
def _soft_reboot(self, context, instance, network_info, block_device_info=None):
LOG.info("***** Calling SOFT REBOOT *******************")
ec2_id = instance['metadata']['ec2_id']
self.ec2_conn.reboot_instances(instance_ids=[ec2_id], dry_run=False)
LOG.info("Soft Reboot Complete.")
def _hard_reboot(self, context, instance, network_info, block_device_info=None):
LOG.info("***** Calling HARD REBOOT *******************")
self.power_off(instance)
self.power_on(context, instance, network_info, block_device)
LOG.info("Hard Reboot Complete.")
@staticmethod
def get_host_ip_addr():
"""Retrieves the IP address of the dom0
"""
LOG.info("***** Calling get_host_ip_addr *******************")
return CONF.my_ip
    def set_admin_password(self, instance, new_pass):
        """Boto doesn't support setting the password at the time of creating an instance.
        hence not implemented.
        """
        pass

    def inject_file(self, instance, b64_path, b64_contents):
        # File injection is not supported on the EC2 backend; no-op.
        pass

    def resume_state_on_host_boot(self, context, instance, network_info,
                                  block_device_info=None):
        # Instances run in EC2, not on this host, so a host reboot does not
        # affect them; nothing to resume.
        pass

    def rescue(self, context, instance, network_info, image_meta,
               rescue_password):
        # Rescue mode is not implemented for the EC2 backend.
        pass

    def unrescue(self, instance, network_info):
        # Rescue mode is not implemented for the EC2 backend.
        pass

    def poll_rebooting_instances(self, timeout, instances):
        # No-op: reboot completion is awaited synchronously in reboot().
        pass
    def migrate_disk_and_power_off(self, context, instance, dest,
                                   instance_type, network_info,
                                   block_device_info=None):
        # Resize is handled in finish_migration() by changing the EC2
        # instance type; no disk migration is needed.
        pass

    def finish_revert_migration(self, context, instance, network_info,
                                block_device_info=None, power_on=True):
        # Reverting a resize is not implemented for the EC2 backend.
        pass

    def post_live_migration_at_destination(self, context, instance,
                                           network_info,
                                           block_migration=False,
                                           block_device_info=None):
        # Live migration between EC2 instances is not supported; no-op.
        pass
def power_off(self, instance):
"""Power off the specified instance.
"""
LOG.info("***** Calling POWER OFF *******************")
ec2_id = instance['metadata']['ec2_id']
self.ec2_conn.stop_instances(
instance_ids=[ec2_id], force=False, dry_run=False)
self._wait_for_state(instance, ec2_id, "stopped", power_state.SHUTDOWN)
def power_on(self, context, instance, network_info, block_device_info):
"""Power on the specified instance.
"""
LOG.info("***** Calling POWER ON *******************")
ec2_id = instance['metadata']['ec2_id']
self.ec2_conn.start_instances(instance_ids=[ec2_id], dry_run=False)
self._wait_for_state(instance, ec2_id, "running", power_state.RUNNING)
def soft_delete(self, instance):
"""Deleting the specified instance
"""
self.destroy(instance)
    def restore(self, instance):
        # Soft-deleted instances are destroyed immediately on EC2, so there
        # is nothing to restore.
        pass
    def pause(self, instance):
        """Boto doesn't support pause and cannot save system state and hence we've implemented the closest functionality
        which is to poweroff the instance.
        :param instance: nova.objects.instance.Instance
        """
        self.power_off(instance)

    def unpause(self, instance):
        """Since Boto doesn't support pause and cannot save system state, we had implemented the closest functionality
        which is to poweroff the instance. and powering on such an instance in this method.
        :param instance: nova.objects.instance.Instance
        """
        # power_on() only reads the instance argument, so None is passed for
        # the unused context/network/block-device parameters.
        self.power_on(
            context=None, instance=instance, network_info=None, block_device_info=None)

    def suspend(self, instance):
        """Boto doesn't support suspend and cannot save system state and hence we've implemented the closest
        functionality which is to poweroff the instance.
        :param instance: nova.objects.instance.Instance
        """
        self.power_off(instance)

    def resume(self, context, instance, network_info, block_device_info=None):
        """Since Boto doesn't support suspend and we cannot save system state, we've implemented the closest
        functionality which is to power on the instance.
        :param instance: nova.objects.instance.Instance
        """
        self.power_on(context, instance, network_info, block_device_info)
    def destroy(self, context, instance, network_info, block_device_info=None,
                destroy_disks=True, migrate_data=None):
        """Destroy the specified instance from the Hypervisor.
        If the instance is not found (for example if networking failed), this
        function should still succeed. It's probably a good idea to log a
        warning in that case.
        :param context: security context (unused here)
        :param instance: Instance object as returned by DB layer.
        :param network_info:
           :py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
        :param block_device_info: Information about block devices that should
                                  be detached from the instance.
        :param destroy_disks: Indicates if disks should be destroyed
        :param migrate_data: implementation specific params
        """
        LOG.info("***** Calling DESTROY *******************")
        # Instance never reached EC2 (spawn failed early): nothing to delete.
        if 'ec2_id' not in instance['metadata']:
            LOG.warning(_("Key '%s' not in EC2 instances") % instance['name'], instance=instance)
            return
        elif 'public_ip' not in instance['metadata'] and 'public_ip_address' not in instance['metadata']:
            # NOTE(review): Python 2 print statement used as debug output;
            # should be LOG.debug.
            print instance['metadata']
            LOG.warning(_("Public IP is null"), instance=instance)
            return
        else:
            # Deleting the instance from EC2
            ec2_id = instance['metadata']['ec2_id']
            # Best-effort lookup: if EC2 cannot be reached, destroy still
            # "succeeds" per the driver contract above.
            try:
                ec2_instances = self.ec2_conn.get_only_instances(instance_ids=[ec2_id])
            except Exception:
                return
            if ec2_instances.__len__() == 0:
                LOG.warning(_("EC2 instance with ID %s not found") % ec2_id, instance=instance)
                return
            else:
                # get the elastic ip associated with the instance & disassociate
                # it, and release it
                elastic_ip_address = \
                    self.ec2_conn.get_all_addresses(addresses=instance['metadata']['public_ip_address'])[0]
                LOG.info("****** Disassociating the elastic IP *********")
                self.ec2_conn.disassociate_address(elastic_ip_address.public_ip)
                # Force-stop, terminate, and wait for EC2 to confirm the
                # terminated state before releasing the address.
                self.ec2_conn.stop_instances(instance_ids=[ec2_id], force=True)
                self.ec2_conn.terminate_instances(instance_ids=[ec2_id])
                self._wait_for_state(instance, ec2_id, "terminated", power_state.SHUTDOWN)
                LOG.info("****** Releasing the elastic IP ************")
                self.ec2_conn.release_address(allocation_id=elastic_ip_address.allocation_id)
    def attach_volume(self, context, connection_info, instance, mountpoint,
                      disk_bus=None, device_type=None, encryption=None):
        """Attach the disk to the instance at mountpoint using info.

        The OpenStack volume id is translated to an EBS volume id via the
        volume_map from ec2driver_config.
        """
        instance_name = instance['name']
        if instance_name not in self._mounts:
            self._mounts[instance_name] = {}
        self._mounts[instance_name][mountpoint] = connection_info

        volume_id = connection_info['data']['volume_id']

        # ec2 only attaches volumes at /dev/sdf through /dev/sdp
        # NOTE(review): the device name is hard-coded to /dev/sdn, so only
        # one volume can be attached per instance at a time -- confirm.
        self.ec2_conn.attach_volume(volume_map[volume_id], instance['metadata']['ec2_id'], "/dev/sdn", dry_run=False)

    def detach_volume(self, connection_info, instance, mountpoint, encryption=None):
        """Detach the disk attached to the instance.
        """
        # Drop the local bookkeeping entry if present; the EC2 detach below
        # happens regardless.
        try:
            del self._mounts[instance['name']][mountpoint]
        except KeyError:
            pass
        volume_id = connection_info['data']['volume_id']
        self.ec2_conn.detach_volume(volume_map[volume_id], instance_id=instance['metadata']['ec2_id'],
                                    device="/dev/sdn", force=False, dry_run=False)

    def swap_volume(self, old_connection_info, new_connection_info,
                    instance, mountpoint):
        """Replace the disk attached to the instance.

        :returns: True on success
        """
        instance_name = instance['name']
        if instance_name not in self._mounts:
            self._mounts[instance_name] = {}
        self._mounts[instance_name][mountpoint] = new_connection_info

        old_volume_id = old_connection_info['data']['volume_id']
        new_volume_id = new_connection_info['data']['volume_id']

        self.detach_volume(old_connection_info, instance, mountpoint)
        # wait for the old volume to detach successfully to make sure
        # /dev/sdn is available for the new volume to be attached
        # NOTE(review): fixed 60s sleep instead of polling the volume
        # status -- fragile; consider waiting on the detachment state.
        time.sleep(60)
        self.ec2_conn.attach_volume(volume_map[new_volume_id], instance['metadata']['ec2_id'], "/dev/sdn",
                                    dry_run=False)
        return True
    def attach_interface(self, instance, image_meta, vif):
        # Interfaces are only tracked in the in-memory _interfaces dict;
        # no EC2 call is made.
        if vif['id'] in self._interfaces:
            raise exception.InterfaceAttachFailed('duplicate')
        self._interfaces[vif['id']] = vif

    def detach_interface(self, instance, vif):
        # Remove the local bookkeeping entry; raises if it was never attached.
        try:
            del self._interfaces[vif['id']]
        except KeyError:
            raise exception.InterfaceDetachFailed('not attached')
    def get_info(self, instance):
        """Get the current status of an instance, by name (not ID!)
        :param instance: nova.objects.instance.Instance object
        Returns a dict containing:
        :state:           the running state, one of the power_state codes
        :max_mem:         (int) the maximum memory in KBytes allowed
        :mem:             (int) the memory in KBytes used by the domain
        :num_cpu:         (int) the number of virtual CPUs for the domain
        :cpu_time:        (int) the CPU time used in nanoseconds
        :raises: InstanceNotFound if the instance has no EC2 counterpart
        """
        LOG.info("*************** GET INFO ********************")
        if 'metadata' not in instance or 'ec2_id' not in instance['metadata']:
            raise exception.InstanceNotFound(instance_id=instance['name'])

        ec2_id = instance['metadata']['ec2_id']
        ec2_instances = self.ec2_conn.get_only_instances(instance_ids=[ec2_id], filters=None, dry_run=False,
                                                         max_results=None)
        if ec2_instances.__len__() == 0:
            LOG.warning(_("EC2 instance with ID %s not found") % ec2_id, instance=instance)
            raise exception.InstanceNotFound(instance_id=instance['name'])
        ec2_instance = ec2_instances[0]

        LOG.info(ec2_instance)
        LOG.info("state %s max_mem %s mem %s flavor %s" %
                 (EC2_STATE_MAP.get(ec2_instance.state), ec2_instance.ramdisk, ec2_instance.get_attribute('ramdisk', dry_run=False), ec2_instance.instance_type))
        # NOTE(review): max_mem/mem are filled from the instance's *ramdisk*
        # attribute, which is not a memory size, and num_cpu/cpu_time are
        # hard-coded -- these values look like placeholders; confirm.
        return {'state': EC2_STATE_MAP.get(ec2_instance.state),
                'max_mem': ec2_instance.ramdisk,
                'mem': ec2_instance.get_attribute('ramdisk', dry_run=False),
                'num_cpu': 2,
                'cpu_time': 0}
def allow_key(self, key):
for key_to_filter in DIAGNOSTIC_KEYS_TO_FILTER:
if key == key_to_filter:
return False
return True
    def get_diagnostics(self, instance_name):
        """Return data about VM diagnostics.

        Combines the boto Instance attributes (prefixed 'instance.') with
        the last hour of CloudWatch metric averages (prefixed 'metrics.').

        :param instance_name: OpenStack server identifier accepted by
                              nova.servers.get
        :raises: InstanceNotFound if the instance has no EC2 counterpart
        """
        LOG.info("******* GET DIAGNOSTICS *********************************************")
        instance = self.nova.servers.get(instance_name)

        ec2_id = instance.metadata['ec2_id']
        ec2_instances = self.ec2_conn.get_only_instances(instance_ids=[ec2_id], filters=None, dry_run=False,
                                                         max_results=None)
        if ec2_instances.__len__() == 0:
            LOG.warning(_("EC2 instance with ID %s not found") % ec2_id, instance=instance)
            raise exception.InstanceNotFound(instance_id=instance['name'])
        ec2_instance = ec2_instances[0]

        diagnostics = {}
        # Flatten the boto Instance attributes, skipping the keys in
        # DIAGNOSTIC_KEYS_TO_FILTER (non-serializable objects).
        for key, value in ec2_instance.__dict__.items():
            if self.allow_key(key):
                diagnostics['instance.' + key] = str(value)

        metrics = self.cloudwatch_conn.list_metrics(dimensions={'InstanceId': ec2_id})

        # NOTE(review): function-level import; should live at module top.
        import datetime
        # One datapoint per metric: the hourly average over the last hour.
        for metric in metrics:
            end = datetime.datetime.utcnow()
            start = end - datetime.timedelta(hours=1)
            details = metric.query(start, end, 'Average', None, 3600)
            if len(details) > 0:
                diagnostics['metrics.' + str(metric)] = details[0]
        return diagnostics
    def get_all_bw_counters(self, instances):
        """Return bandwidth usage counters for each interface on each
           running VM.
        """
        # Not collected from EC2; always empty.
        bw = []
        return bw

    def get_all_volume_usage(self, context, compute_host_bdms):
        """Return usage info for volumes attached to vms on
           a given host.
        """
        # Not collected from EC2; always empty.
        volusage = []
        return volusage
def block_stats(self, instance_name, disk_id):
return [0L, 0L, 0L, 0L, None]
def interface_stats(self, instance_name, iface_id):
return [0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L]
    def get_console_output(self, instance):
        # Placeholder console output; the real EC2 console log is not
        # fetched.
        return 'EC2 CONSOLE OUTPUT\nANOTHER\nLAST LINE'

    def get_vnc_console(self, instance):
        # Placeholder connection details; EC2 exposes no VNC console.
        return {'internal_access_path': 'EC2',
                'host': 'EC2vncconsole.com',
                'port': 6969}

    def get_spice_console(self, instance):
        # Placeholder connection details; EC2 exposes no SPICE console.
        return {'internal_access_path': 'EC2',
                'host': 'EC2spiceconsole.com',
                'port': 6969,
                'tlsPort': 6970}

    def get_console_pool_info(self, console_type):
        # Placeholder console-pool credentials.
        return {'address': '127.0.0.1',
                'username': 'EC2user',
                'password': 'EC2password'}
    def _get_ec2_instance_ids_with_security_group(self, ec2_security_group):
        # EC2 instance ids currently holding *ec2_security_group*.
        return [instance.id for instance in ec2_security_group.instances()]

    def _get_openstack_instances_with_security_group(self, openstack_security_group):
        # OpenStack servers whose security-group list contains the group,
        # matched by name.
        return [instance for instance in (self.nova.servers.list())
                if openstack_security_group.name in [group['name'] for group in instance.security_groups]]

    def _get_id_of_ec2_instance_to_update_security_group(self, ec2_instance_ids_for_security_group,
                                                         ec2_ids_for_openstack_instances_for_security_group):
        # The single instance id present on one side but not the other is
        # the one whose group membership changed.
        # NOTE(review): pop() raises KeyError if the two sides are already
        # in sync, and only one difference at a time is handled -- confirm
        # callers guarantee exactly one change.
        return (set(ec2_ids_for_openstack_instances_for_security_group).symmetric_difference(
            set(ec2_instance_ids_for_security_group))).pop()

    def _should_add_security_group_to_instance(self, ec2_instance_ids_for_security_group,
                                               ec2_ids_for_openstack_instances_for_security_group):
        # OpenStack knows more members than EC2 -> the group was added.
        return len(ec2_instance_ids_for_security_group) < len(ec2_ids_for_openstack_instances_for_security_group)

    def _add_security_group_to_instance(self, ec2_instance_id, ec2_security_group):
        # Append the group to the instance's current groupSet attribute.
        security_group_ids_for_instance = self._get_ec2_security_group_ids_for_instance(ec2_instance_id)
        security_group_ids_for_instance.append(ec2_security_group.id)
        self.ec2_conn.modify_instance_attribute(ec2_instance_id, "groupSet", security_group_ids_for_instance)

    def _remove_security_group_from_instance(self, ec2_instance_id, ec2_security_group):
        # Remove the group from the instance's current groupSet attribute.
        security_group_ids_for_instance = self._get_ec2_security_group_ids_for_instance(ec2_instance_id)
        security_group_ids_for_instance.remove(ec2_security_group.id)
        self.ec2_conn.modify_instance_attribute(ec2_instance_id, "groupSet", security_group_ids_for_instance)

    def _get_ec2_security_group_ids_for_instance(self, ec2_instance_id):
        # Current security-group ids attached to the EC2 instance.
        security_groups_for_instance = self.ec2_conn.get_instance_attribute(ec2_instance_id, "groupSet")['groupSet']
        security_group_ids_for_instance = [group.id for group in security_groups_for_instance]
        return security_group_ids_for_instance

    def _get_or_create_ec2_security_group(self, openstack_security_group):
        # Look the group up by name; create a matching one on EC2 if absent.
        try:
            return self.ec2_conn.get_all_security_groups(openstack_security_group.name)[0]
        except (EC2ResponseError, IndexError) as e:
            LOG.warning(e)
            return self.ec2_conn.create_security_group(openstack_security_group.name,
                                                       openstack_security_group.description)
    def refresh_security_group_rules(self, security_group_id):
        """This method is called after a change to security groups.
        All security groups and their associated rules live in the datastore,
        and calling this method should apply the updated rules to instances
        running the specified security group.
        An error should be raised if the operation cannot complete.

        Despite its name, this implementation syncs group *membership*: it
        finds the one instance whose membership differs between OpenStack
        and EC2 and adds or removes the EC2 group accordingly.
        """
        LOG.info("************** REFRESH SECURITY GROUP RULES ******************")
        openstack_security_group = self.nova. security_groups.get(security_group_id)
        ec2_security_group = self._get_or_create_ec2_security_group(openstack_security_group)

        ec2_ids_for_ec2_instances_with_security_group = self._get_ec2_instance_ids_with_security_group(
            ec2_security_group)
        ec2_ids_for_openstack_instances_with_security_group = [
            instance.metadata['ec2_id'] for instance
            in self._get_openstack_instances_with_security_group(openstack_security_group)
        ]

        # The lock serializes concurrent membership updates so the
        # diff-and-modify below operates on a consistent groupSet.
        self.security_group_lock.acquire()
        try:
            ec2_instance_to_update = self._get_id_of_ec2_instance_to_update_security_group(
                ec2_ids_for_ec2_instances_with_security_group,
                ec2_ids_for_openstack_instances_with_security_group
            )
            should_add_security_group = self._should_add_security_group_to_instance(
                ec2_ids_for_ec2_instances_with_security_group,
                ec2_ids_for_openstack_instances_with_security_group)
            if should_add_security_group:
                self._add_security_group_to_instance(ec2_instance_to_update, ec2_security_group)
            else:
                self._remove_security_group_from_instance(ec2_instance_to_update, ec2_security_group)
        finally:
            self.security_group_lock.release()
        return True
    def refresh_security_group_members(self, security_group_id):
        # Only logs the event; membership changes are applied in
        # refresh_security_group_rules().
        LOG.info("************** REFRESH SECURITY GROUP MEMBERS ******************")
        LOG.info(security_group_id)
        return True
def _get_allowed_group_name_from_openstack_rule_if_present(self, openstack_rule):
return openstack_rule['group']['name'] if 'name' in openstack_rule['group'] else None
def _get_allowed_ip_range_from_openstack_rule_if_present(self, openstack_rule):
return openstack_rule['ip_range']['cidr'] if 'cidr' in openstack_rule['ip_range'] else None
    def refresh_instance_security_rules(self, instance):
        """Mirror the instance's OpenStack security-group rules onto EC2."""
        LOG.info("************** REFRESH INSTANCE SECURITY RULES ******************")
        LOG.info(instance)
        # TODO: lock for case when group is associated with multiple instances [Cameron & Ed]
        self.instance_rule_refresher.refresh(self.nova.servers.get(instance['id']))
        return

    def refresh_provider_fw_rules(self):
        # Provider-level firewall rules are not supported on EC2; no-op.
        pass
    def get_available_resource(self, nodename):
        """Retrieve resource information.
        Updates compute manager resource info on ComputeNode table.
        This method is called when nova-compute launches and as part of a periodic task that records results in the DB.
        Since we don't have a real hypervisor, pretend we have lots of disk and ram.
        :param nodename:
            node which the caller want to get resources from
            a driver that manages only one node can safely ignore this
        :returns: Dictionary describing resources; empty dict for an
                  unknown node.
        """
        LOG.info("************** GET_AVAILABLE_RESOURCE ******************")
        if nodename not in _EC2_NODES:
            return {}

        # Fixed capacity figures from ec2driver_config; EC2 itself imposes
        # no per-host limits.
        dic = {'vcpus': VCPUS,
               'memory_mb': MEMORY_IN_MBS,
               'local_gb': DISK_IN_GB,
               'vcpus_used': 0,
               'memory_mb_used': 0,
               'local_gb_used': 0,
               'hypervisor_type': 'EC2',
               'hypervisor_version': '1.0',
               'hypervisor_hostname': nodename,
               'disk_available_least': 0,
               'cpu_info': '?'}
        return dic
    def ensure_filtering_rules_for_instance(self, instance_ref, network_info):
        # Firewall filtering is handled by EC2 security groups; no-op.
        return

    def get_instance_disk_info(self, instance_name):
        # Disk info is not tracked locally for EC2 instances.
        return

    def live_migration(self, context, instance_ref, dest,
                       post_method, recover_method, block_migration=False,
                       migrate_data=None):
        # No actual migration is performed; the completion callback is
        # invoked immediately.
        post_method(context, instance_ref, dest, block_migration,
                    migrate_data)
        return

    def check_can_live_migrate_destination_cleanup(self, ctxt,
                                                   dest_check_data):
        # Nothing was allocated by the destination check; no-op.
        return

    def check_can_live_migrate_destination(self, ctxt, instance_ref,
                                           src_compute_info, dst_compute_info,
                                           block_migration=False,
                                           disk_over_commit=False):
        # Always claims migration is possible (no data to pass to source).
        return {}

    def check_can_live_migrate_source(self, ctxt, instance_ref,
                                      dest_check_data):
        # Always claims migration is possible from the source side.
        return
    def finish_migration(self, context, migration, instance, disk_info,
                         network_info, image_meta, resize_instance,
                         block_device_info=None, power_on=True):
        """Completes a resize

        :param migration: the migrate/resize information
        :param instance: nova.objects.instance.Instance being migrated/resized
        :param power_on: is True the instance should be powered on

        The resize is realised by changing the EC2 instance type; the
        instance is restarted later in confirm_migration().
        """
        LOG.info("***** Calling FINISH MIGRATION *******************")

        ec2_id = instance['metadata']['ec2_id']
        ec_instance_info = self.ec2_conn.get_only_instances(
            instance_ids=[ec2_id], filters=None, dry_run=False, max_results=None)
        ec2_instance = ec_instance_info[0]

        # EC2 instance needs to be stopped to modify it's attribute. So we stop the instance,
        # modify the instance type in this case, and then restart the instance.
        ec2_instance.stop()
        self._wait_for_state(instance, ec2_id, "stopped", power_state.SHUTDOWN)
        # flavor_map (from ec2driver_config) translates the new OpenStack
        # flavor id into an EC2 instance type.
        new_instance_type = flavor_map[migration['new_instance_type_id']]
        ec2_instance.modify_attribute('instanceType', new_instance_type)
    def confirm_migration(self, migration, instance, network_info):
        """Confirms a resize, destroying the source VM.

        :param instance: nova.objects.instance.Instance

        On EC2 there is no separate source VM to destroy: the instance was
        stopped and retyped in finish_migration(), so confirming simply
        starts it again.
        """
        LOG.info("***** Calling CONFIRM MIGRATION *******************")
        ec2_id = instance['metadata']['ec2_id']
        ec_instance_info = self.ec2_conn.get_only_instances(
            instance_ids=[ec2_id], filters=None, dry_run=False, max_results=None)
        ec2_instance = ec_instance_info[0]
        ec2_instance.start()
        self._wait_for_state(instance, ec2_id, "running", power_state.RUNNING)

    def pre_live_migration(self, context, instance_ref, block_device_info,
                           network_info, disk, migrate_data=None):
        # No preparation needed; live migration is a no-op for EC2.
        return

    def unfilter_instance(self, instance_ref, network_info):
        # Firewall filtering is handled by EC2 security groups; no-op.
        return
def get_host_stats(self, refresh=False):
"""Return EC2 Host Status of name, ram, disk, network."""
stats = []
for nodename in _EC2_NODES:
host_status = self.host_status_base.copy()
host_status['hypervisor_hostname'] = nodename
host_status['host_hostname'] = nodename
host_status['host_name_label'] = nodename
host_status['hypervisor_type'] = 'Amazon-EC2'
host_status['vcpus'] = VCPUS
host_status['memory_mb'] = MEMORY_IN_MBS
host_status['local_gb'] = DISK_IN_GB
stats.append(host_status)
if len(stats) == 0:
raise exception.NovaException("EC2Driver has no node")
elif len(stats) == 1:
return stats[0]
else:
return stats
    def host_power_action(self, host, action):
        """Reboots, shuts down or powers up the host."""
        # No physical host to act on; echo the requested action back.
        return action

    def host_maintenance_mode(self, host, mode):
        """Start/Stop host maintenance window. On start, it triggers
        guest VMs evacuation.
        """
        if not mode:
            return 'off_maintenance'
        return 'on_maintenance'

    def set_host_enabled(self, host, enabled):
        """Sets the specified host's ability to accept new instances."""
        if enabled:
            return 'enabled'
        return 'disabled'

    def get_disk_available_least(self):
        # Not meaningful for EC2; capacity is reported statically elsewhere.
        pass

    def add_to_aggregate(self, context, aggregate, host, **kwargs):
        # Host aggregates are not supported by this driver; no-op.
        pass

    def remove_from_aggregate(self, context, aggregate, host, **kwargs):
        # Host aggregates are not supported by this driver; no-op.
        pass

    def get_volume_connector(self, instance):
        # Placeholder connector info; volumes are attached via EBS, not
        # iSCSI from this host.
        return {'ip': '127.0.0.1', 'initiator': 'EC2', 'host': 'EC2host'}

    def get_available_nodes(self, refresh=False):
        # The module-level node list (see set_nodes/restore_nodes).
        return _EC2_NODES

    def instance_on_disk(self, instance):
        # Instances never have local disk state on this host.
        return False

    def list_instance_uuids(self):
        # UUID listing is not implemented; callers fall back to
        # list_instances().
        return []
    def _wait_for_state(self, instance, ec2_id, desired_state, desired_power_state):
        """Wait for the state of the corrosponding ec2 instance to be in completely available state.
        :params:ec2_id: the instance's corrosponding ec2 id.
        :params:desired_state: the desired state of the instance to be in.
        :params:desired_power_state: accepted for symmetry with the nova
                                     power-state codes; not read here.
        """
        def _wait_for_power_state():
            """Called at an interval until the VM is running again.
            """
            ec2_instance = self.ec2_conn.get_only_instances(instance_ids=[ec2_id])

            state = ec2_instance[0].state
            if state == desired_state:
                LOG.info("Instance has changed state to %s." % desired_state)
                raise loopingcall.LoopingCallDone()

        def _wait_for_status_check():
            """Power state of a machine might be ON, but status check is the one which gives the real
            """
            ec2_instance = self.ec2_conn.get_all_instance_status(instance_ids=[ec2_id])[0]
            if ec2_instance.system_status.status == 'ok':
                LOG.info("Instance status check is %s / %s" %
                         (ec2_instance.system_status.status, ec2_instance.instance_status.status))
                raise loopingcall.LoopingCallDone()

        #waiting for the power state to change
        # NOTE(review): no timeout -- if EC2 never reaches desired_state this
        # loops forever; confirm whether a timeout should be added.
        timer = loopingcall.FixedIntervalLoopingCall(_wait_for_power_state)
        timer.start(interval=1).wait()

        #waiting for the status of the machine to be in running
        if desired_state == 'running':
            timer = loopingcall.FixedIntervalLoopingCall(_wait_for_status_check)
            timer.start(interval=0.5).wait()

    def _wait_for_image_state(self, ami_id, desired_state):
        """Timer to wait for the image/snapshot to reach a desired state
        :params:ami_id: correspoding image id in Amazon
        :params:desired_state: the desired new state of the image to be in.
        """
        def _wait_for_state():
            """Called at an interval until the AMI image is available."""
            try:
                images = self.ec2_conn.get_all_images(image_ids=[ami_id], owners=None,
                                                      executable_by=None, filters=None, dry_run=None)
                state = images[0].state
                # LOG.info("\n\n\nImage id = %s" % ami_id + ", state = %s\n\n\n" % state)
                if state == desired_state:
                    LOG.info("Image has changed state to %s." % desired_state)
                    raise loopingcall.LoopingCallDone()
            except boto_exc.EC2ResponseError:
                # The image may not be queryable immediately after creation;
                # keep polling.
                pass
        timer = loopingcall.FixedIntervalLoopingCall(_wait_for_state)
        timer.start(interval=0.5).wait()
class EC2VirtAPI(virtapi.VirtAPI):
    """VirtAPI implementation that forwards every call to the nova ``db`` API.

    Each method is a thin pass-through to the identically-purposed ``db``
    function; no EC2-specific logic lives here.
    """

    def instance_update(self, context, instance_uuid, updates):
        # Returns whatever the db layer returns: the updated instance
        # together with its pre-update original.
        return db.instance_update_and_get_original(context,
                                                   instance_uuid,
                                                   updates)

    def aggregate_get_by_host(self, context, host, key=None):
        return db.aggregate_get_by_host(context, host, key=key)

    def aggregate_metadata_add(self, context, aggregate, metadata,
                               set_delete=False):
        return db.aggregate_metadata_add(context, aggregate['id'], metadata,
                                         set_delete=set_delete)

    def aggregate_metadata_delete(self, context, aggregate, key):
        return db.aggregate_metadata_delete(context, aggregate['id'], key)

    def security_group_get_by_instance(self, context, instance):
        return db.security_group_get_by_instance(context, instance['uuid'])

    def security_group_rule_get_by_security_group(self, context,
                                                  security_group):
        return db.security_group_rule_get_by_security_group(
            context, security_group['id'])

    def provider_fw_rule_get_all(self, context):
        return db.provider_fw_rule_get_all(context)

    def agent_build_get_by_triple(self, context, hypervisor, os, architecture):
        return db.agent_build_get_by_triple(context,
                                            hypervisor, os, architecture)

    def instance_type_get(self, context, instance_type_id):
        return db.instance_type_get(context, instance_type_id)

    def block_device_mapping_get_all_by_instance(self, context, instance,
                                                 legacy=True):
        bdms = db.block_device_mapping_get_all_by_instance(context,
                                                           instance['uuid'])
        if legacy:
            # Convert to the legacy block-device-mapping format for callers
            # that have not migrated to the new-style mappings.
            bdms = block_device.legacy_mapping(bdms)
        return bdms

    def block_device_mapping_update(self, context, bdm_id, values):
        return db.block_device_mapping_update(context, bdm_id, values)
| 43.324701 | 161 | 0.635271 | 39,482 | 0.907674 | 0 | 0 | 198 | 0.004552 | 0 | 0 | 13,851 | 0.318428 |
2dec3a7fdb0143127ec3b1adee9a1d3e5ba90910 | 1,239 | py | Python | output/models/ms_data/additional/isdefault072_xsd/isdefault072.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | 1 | 2021-08-14T17:59:21.000Z | 2021-08-14T17:59:21.000Z | output/models/ms_data/additional/isdefault072_xsd/isdefault072.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | 4 | 2020-02-12T21:30:44.000Z | 2020-04-15T20:06:46.000Z | output/models/ms_data/additional/isdefault072_xsd/isdefault072.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | null | null | null | from dataclasses import dataclass, field
from typing import List
from xml.etree.ElementTree import QName
__NAMESPACE__ = "http://schemas.microsoft.com/2003/10/Serialization/"
@dataclass
class Array:
    """XML binding for the MS serialization ``Array`` type.

    NOTE(review): xsdata-generated code -- field names, order and the
    ``metadata`` dicts are consumed by the (de)serializer; do not edit by
    hand.
    """

    class Meta:
        # Target XML namespace of the element.
        namespace = "http://schemas.microsoft.com/2003/10/Serialization/"

    # Repeated child <Item> elements; each element may be nil.
    item: List[object] = field(
        default_factory=list,
        metadata={
            "name": "Item",
            "type": "Element",
            "namespace": "",
            "nillable": True,
        }
    )
    # ItemType attribute; defaults to xs:anyType.
    item_type: QName = field(
        default=QName("{http://www.w3.org/2001/XMLSchema}anyType"),
        metadata={
            "name": "ItemType",
            "type": "Attribute",
            "namespace": "http://schemas.microsoft.com/2003/10/Serialization/",
        }
    )
    # Dimensions attribute, serialized as a whitespace-separated token
    # list; defaults to a single dimension of size 1.
    dimensions: List[int] = field(
        default_factory=lambda: [
            1,
        ],
        metadata={
            "name": "Dimensions",
            "type": "Attribute",
            "tokens": True,
        }
    )
    # LowerBounds attribute, token list; defaults to a lower bound of 0.
    lower_bounds: List[int] = field(
        default_factory=lambda: [
            0,
        ],
        metadata={
            "name": "LowerBounds",
            "type": "Attribute",
            "tokens": True,
        }
    )
| 24.78 | 79 | 0.514124 | 1,049 | 0.846651 | 0 | 0 | 1,060 | 0.855529 | 0 | 0 | 383 | 0.30912 |
2deca87ea9f9f2cd6faccaa2e1e2be6b9753edc0 | 54,161 | py | Python | pysnmp/PRIVATE-SW0657840-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/PRIVATE-SW0657840-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/PRIVATE-SW0657840-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module PRIVATE-SW0657840-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/PRIVATE-SW0657840-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:33:18 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ValueRangeConstraint, SingleValueConstraint, ConstraintsIntersection, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsIntersection", "ValueSizeConstraint")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
NotificationGroup, ModuleCompliance, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance", "ObjectGroup")
Integer32, Counter64, Counter32, MibIdentifier, iso, Gauge32, enterprises, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, ModuleIdentity, ObjectIdentity, Unsigned32, IpAddress, Bits, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "Counter64", "Counter32", "MibIdentifier", "iso", "Gauge32", "enterprises", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ModuleIdentity", "ObjectIdentity", "Unsigned32", "IpAddress", "Bits", "NotificationType")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
privatetech = ModuleIdentity((1, 3, 6, 1, 4, 1, 5205))
if mibBuilder.loadTexts: privatetech.setLastUpdated('200607030000Z')
if mibBuilder.loadTexts: privatetech.setOrganization('xxx Tech Corp.')
switch = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2))
sw0657840ProductID = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9))
sw0657840Produces = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1))
sw0657840System = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1))
sw0657840CommonSys = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 1))
sw0657840Reboot = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ValueRangeConstraint(2, 2), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840Reboot.setStatus('current')
sw0657840BiosVsersion = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840BiosVsersion.setStatus('current')
sw0657840FirmwareVersion = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840FirmwareVersion.setStatus('current')
sw0657840HardwareVersion = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840HardwareVersion.setStatus('current')
sw0657840MechanicalVersion = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840MechanicalVersion.setStatus('current')
sw0657840SerialNumber = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SerialNumber.setStatus('current')
sw0657840HostMacAddress = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840HostMacAddress.setStatus('current')
sw0657840DevicePort = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840DevicePort.setStatus('current')
sw0657840RamSize = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 1, 9), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840RamSize.setStatus('current')
sw0657840FlashSize = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 1, 10), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840FlashSize.setStatus('current')
sw0657840IP = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 2))
sw0657840DhcpSetting = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840DhcpSetting.setStatus('current')
sw0657840IPAddress = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 2, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840IPAddress.setStatus('current')
sw0657840NetMask = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 2, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840NetMask.setStatus('current')
sw0657840DefaultGateway = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 2, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840DefaultGateway.setStatus('current')
sw0657840DnsSetting = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 2, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840DnsSetting.setStatus('current')
sw0657840DnsServer = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 2, 6), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840DnsServer.setStatus('current')
sw0657840Time = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 3))
sw0657840SystemCurrentTime = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 3, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SystemCurrentTime.setStatus('current')
sw0657840ManualTimeSetting = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 3, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840ManualTimeSetting.setStatus('current')
sw0657840NTPServer = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 3, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840NTPServer.setStatus('current')
sw0657840NTPTimeZone = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 3, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-12, 13))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840NTPTimeZone.setStatus('current')
sw0657840NTPTimeSync = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 3, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840NTPTimeSync.setStatus('current')
sw0657840DaylightSavingTime = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 3, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-5, 5))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840DaylightSavingTime.setStatus('current')
sw0657840DaylightStartTime = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 3, 7), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840DaylightStartTime.setStatus('current')
sw0657840DaylightEndTime = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 3, 8), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840DaylightEndTime.setStatus('current')
sw0657840Account = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 4))
sw0657840AccountNumber = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 4, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 5))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840AccountNumber.setStatus('current')
sw0657840AccountTable = MibTable((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 4, 2), )
if mibBuilder.loadTexts: sw0657840AccountTable.setStatus('current')
sw0657840AccountEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 4, 2, 1), ).setIndexNames((0, "PRIVATE-SW0657840-MIB", "sw0657840AccountIndex"))
if mibBuilder.loadTexts: sw0657840AccountEntry.setStatus('current')
sw0657840AccountIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 4, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 5))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840AccountIndex.setStatus('current')
sw0657840AccountAuthorization = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 4, 2, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840AccountAuthorization.setStatus('current')
sw0657840AccountName = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 4, 2, 1, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840AccountName.setStatus('current')
sw0657840AccountPassword = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 4, 2, 1, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840AccountPassword.setStatus('current')
sw0657840AccountAddName = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 4, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840AccountAddName.setStatus('current')
sw0657840AccountAddPassword = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 4, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840AccountAddPassword.setStatus('current')
sw0657840DoAccountAdd = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 4, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840DoAccountAdd.setStatus('current')
sw0657840AccountDel = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 1, 4, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2, 5))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840AccountDel.setStatus('current')
sw0657840Snmp = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 2))
sw0657840GetCommunity = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 2, 1), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840GetCommunity.setStatus('current')
sw0657840SetCommunity = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 2, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840SetCommunity.setStatus('current')
sw0657840TrapHostNumber = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 2, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 6))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840TrapHostNumber.setStatus('current')
sw0657840TrapHostTable = MibTable((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 2, 4), )
if mibBuilder.loadTexts: sw0657840TrapHostTable.setStatus('current')
sw0657840TrapHostEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 2, 4, 1), ).setIndexNames((0, "PRIVATE-SW0657840-MIB", "sw0657840TrapHostIndex"))
if mibBuilder.loadTexts: sw0657840TrapHostEntry.setStatus('current')
sw0657840TrapHostIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 2, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 6))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840TrapHostIndex.setStatus('current')
sw0657840TrapHostIP = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 2, 4, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840TrapHostIP.setStatus('current')
sw0657840TrapHostPort = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 2, 4, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840TrapHostPort.setStatus('current')
sw0657840TrapHostCommunity = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 2, 4, 1, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840TrapHostCommunity.setStatus('current')
sw0657840Alarm = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3))
sw0657840Event = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 1))
sw0657840EventNumber = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840EventNumber.setStatus('current')
sw0657840EventTable = MibTable((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 1, 2), )
if mibBuilder.loadTexts: sw0657840EventTable.setStatus('current')
sw0657840EventEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 1, 2, 1), ).setIndexNames((0, "PRIVATE-SW0657840-MIB", "sw0657840EventIndex"))
if mibBuilder.loadTexts: sw0657840EventEntry.setStatus('current')
sw0657840EventIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840EventIndex.setStatus('current')
sw0657840EventName = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 1, 2, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840EventName.setStatus('current')
sw0657840EventSendEmail = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840EventSendEmail.setStatus('current')
sw0657840EventSendSMS = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840EventSendSMS.setStatus('current')
sw0657840EventSendTrap = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840EventSendTrap.setStatus('current')
sw0657840Email = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 2))
sw0657840EmailServer = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 2, 1), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840EmailServer.setStatus('current')
sw0657840EmailUsername = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 2, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840EmailUsername.setStatus('current')
sw0657840EmailPassword = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 2, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840EmailPassword.setStatus('current')
sw0657840EmailUserNumber = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 2, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 6))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840EmailUserNumber.setStatus('current')
sw0657840EmailUserTable = MibTable((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 2, 5), )
if mibBuilder.loadTexts: sw0657840EmailUserTable.setStatus('current')
sw0657840EmailUserEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 2, 5, 1), ).setIndexNames((0, "PRIVATE-SW0657840-MIB", "sw0657840EmailUserIndex"))
if mibBuilder.loadTexts: sw0657840EmailUserEntry.setStatus('current')
sw0657840EmailUserIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 2, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 6))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840EmailUserIndex.setStatus('current')
sw0657840EmailUserAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 2, 5, 1, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840EmailUserAddress.setStatus('current')
sw0657840SMS = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 3))
sw0657840SMSServer = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 3, 1), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840SMSServer.setStatus('current')
sw0657840SMSUsername = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 3, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840SMSUsername.setStatus('current')
sw0657840SMSPassword = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 3, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840SMSPassword.setStatus('current')
sw0657840SMSUserNumber = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 3, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 6))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SMSUserNumber.setStatus('current')
sw0657840SMSUserTable = MibTable((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 3, 5), )
if mibBuilder.loadTexts: sw0657840SMSUserTable.setStatus('current')
sw0657840SMSUserEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 3, 5, 1), ).setIndexNames((0, "PRIVATE-SW0657840-MIB", "sw0657840SMSUserIndex"))
if mibBuilder.loadTexts: sw0657840SMSUserEntry.setStatus('current')
sw0657840SMSUserIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 3, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 6))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SMSUserIndex.setStatus('current')
sw0657840SMSUserMobilePhone = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 3, 3, 5, 1, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840SMSUserMobilePhone.setStatus('current')
sw0657840Tftp = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 4))
sw0657840TftpServer = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 4, 1), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840TftpServer.setStatus('current')
sw0657840Configuration = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 5))
sw0657840SaveRestore = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 5, 1))
sw0657840SaveStart = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 5, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840SaveStart.setStatus('current')
sw0657840SaveUser = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 5, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840SaveUser.setStatus('current')
sw0657840RestoreDefault = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 5, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ValueRangeConstraint(2, 2), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840RestoreDefault.setStatus('current')
sw0657840RestoreUser = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 5, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840RestoreUser.setStatus('current')
sw0657840ConfigFile = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 5, 2))
sw0657840ExportConfigName = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 5, 2, 1), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840ExportConfigName.setStatus('current')
sw0657840DoExportConfig = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 5, 2, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(1, 1), ValueRangeConstraint(2, 2), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840DoExportConfig.setStatus('current')
sw0657840ImportConfigName = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 5, 2, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840ImportConfigName.setStatus('current')
sw0657840DoImportConfig = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 5, 2, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(1, 1), ValueRangeConstraint(2, 2), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840DoImportConfig.setStatus('current')
sw0657840Diagnostic = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 6))
sw0657840EEPROMTest = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 6, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840EEPROMTest.setStatus('current')
sw0657840UartTest = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 6, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840UartTest.setStatus('current')
sw0657840DramTest = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 6, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840DramTest.setStatus('current')
sw0657840FlashTest = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 6, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840FlashTest.setStatus('current')
sw0657840InternalLoopbackTest = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 6, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840InternalLoopbackTest.setStatus('current')
sw0657840ExternalLoopbackTest = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 6, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840ExternalLoopbackTest.setStatus('current')
sw0657840PingTest = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 6, 7), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840PingTest.setStatus('current')
sw0657840Log = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 7))
sw0657840ClearLog = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 7, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840ClearLog.setStatus('current')
sw0657840UploadLog = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 7, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840UploadLog.setStatus('current')
sw0657840AutoUploadLogState = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 7, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840AutoUploadLogState.setStatus('current')
sw0657840LogNumber = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 7, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 120))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840LogNumber.setStatus('current')
sw0657840LogTable = MibTable((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 7, 5), )
if mibBuilder.loadTexts: sw0657840LogTable.setStatus('current')
sw0657840LogEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 7, 5, 1), ).setIndexNames((0, "PRIVATE-SW0657840-MIB", "sw0657840LogIndex"))
if mibBuilder.loadTexts: sw0657840LogEntry.setStatus('current')
sw0657840LogIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 7, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 120))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840LogIndex.setStatus('current')
sw0657840LogEvent = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 7, 5, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840LogEvent.setStatus('current')
sw0657840Firmware = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 8))
sw0657840FirmwareFileName = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 8, 1), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840FirmwareFileName.setStatus('current')
sw0657840DoFirmwareUpgrade = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 8, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840DoFirmwareUpgrade.setStatus('current')
sw0657840Port = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9))
sw0657840PortStatus = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 1))
sw0657840PortStatusNumber = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840PortStatusNumber.setStatus('current')
sw0657840PortStatusTable = MibTable((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 1, 2), )
if mibBuilder.loadTexts: sw0657840PortStatusTable.setStatus('current')
sw0657840PortStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 1, 2, 1), ).setIndexNames((0, "PRIVATE-SW0657840-MIB", "sw0657840PortStatusIndex"))
if mibBuilder.loadTexts: sw0657840PortStatusEntry.setStatus('current')
sw0657840PortStatusIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840PortStatusIndex.setStatus('current')
sw0657840PortStatusMedia = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 1, 2, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840PortStatusMedia.setStatus('current')
sw0657840PortStatusLink = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 1, 2, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840PortStatusLink.setStatus('current')
sw0657840PortStatusPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 1, 2, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840PortStatusPortState.setStatus('current')
sw0657840PortStatusAutoNego = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 1, 2, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840PortStatusAutoNego.setStatus('current')
sw0657840PortStatusSpdDpx = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 1, 2, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840PortStatusSpdDpx.setStatus('current')
sw0657840PortStatusFlwCtrl = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 1, 2, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840PortStatusFlwCtrl.setStatus('current')
sw0657840PortStatuDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 1, 2, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840PortStatuDescription.setStatus('current')
sw0657840PortConf = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 2))
sw0657840PortConfNumber = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 2, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840PortConfNumber.setStatus('current')
sw0657840PortConfTable = MibTable((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 2, 2), )
if mibBuilder.loadTexts: sw0657840PortConfTable.setStatus('current')
sw0657840PortConfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 2, 2, 1), ).setIndexNames((0, "PRIVATE-SW0657840-MIB", "sw0657840PortConfIndex"))
if mibBuilder.loadTexts: sw0657840PortConfEntry.setStatus('current')
sw0657840PortConfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840PortConfIndex.setStatus('current')
sw0657840PortConfPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 2, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840PortConfPortState.setStatus('current')
sw0657840PortConfSpdDpx = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 2, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 5))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840PortConfSpdDpx.setStatus('current')
sw0657840PortConfFlwCtrl = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 2, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840PortConfFlwCtrl.setStatus('current')
sw0657840PortConfDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 9, 2, 2, 1, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840PortConfDescription.setStatus('current')
sw0657840Mirror = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 10))
sw0657840MirrorMode = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 10, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840MirrorMode.setStatus('current')
sw0657840MirroringPort = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 10, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840MirroringPort.setStatus('current')
sw0657840MirroredPorts = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 10, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840MirroredPorts.setStatus('current')
sw0657840MaxPktLen = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 11))
sw0657840MaxPktLen1 = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 11, 1))
sw0657840MaxPktLenPortNumber = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 11, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840MaxPktLenPortNumber.setStatus('current')
sw0657840MaxPktLenConfTable = MibTable((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 11, 1, 2), )
if mibBuilder.loadTexts: sw0657840MaxPktLenConfTable.setStatus('current')
sw0657840MaxPktLenConfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 11, 1, 2, 1), ).setIndexNames((0, "PRIVATE-SW0657840-MIB", "sw0657840MaxPktLenConfIndex"))
if mibBuilder.loadTexts: sw0657840MaxPktLenConfEntry.setStatus('current')
sw0657840MaxPktLenConfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 11, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840MaxPktLenConfIndex.setStatus('current')
sw0657840MaxPktLenConfSetting = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 11, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(1518, 1518), ValueRangeConstraint(1532, 1532), ValueRangeConstraint(9216, 9216), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840MaxPktLenConfSetting.setStatus('current')
sw0657840Bandwidth = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 12))
sw0657840Bandwidth1 = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 12, 1))
sw0657840BandwidthPortNumber = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 12, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840BandwidthPortNumber.setStatus('current')
sw0657840BandwidthConfTable = MibTable((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 12, 1, 2), )
if mibBuilder.loadTexts: sw0657840BandwidthConfTable.setStatus('current')
sw0657840BandwidthConfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 12, 1, 2, 1), ).setIndexNames((0, "PRIVATE-SW0657840-MIB", "sw0657840BandwidthConfIndex"))
if mibBuilder.loadTexts: sw0657840BandwidthConfEntry.setStatus('current')
sw0657840BandwidthConfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 12, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840BandwidthConfIndex.setStatus('current')
sw0657840BandwidthConfIngressState = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 12, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840BandwidthConfIngressState.setStatus('current')
sw0657840BandwidthConfIngressBW = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 12, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840BandwidthConfIngressBW.setStatus('current')
sw0657840BandwidthConfStormState = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 12, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840BandwidthConfStormState.setStatus('current')
sw0657840BandwidthConfStormBW = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 12, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840BandwidthConfStormBW.setStatus('current')
sw0657840BandwidthConfEgressState = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 12, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840BandwidthConfEgressState.setStatus('current')
sw0657840BandwidthConfEgressBW = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 12, 1, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840BandwidthConfEgressBW.setStatus('current')
sw0657840LoopDetectedConf = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 13))
sw0657840LoopDetectedNumber = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 13, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840LoopDetectedNumber.setStatus('current')
sw0657840LoopDetectedTable = MibTable((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 13, 2), )
if mibBuilder.loadTexts: sw0657840LoopDetectedTable.setStatus('current')
sw0657840LoopDetectedEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 13, 2, 1), ).setIndexNames((0, "PRIVATE-SW0657840-MIB", "sw0657840LoopDetectedfIndex"))
if mibBuilder.loadTexts: sw0657840LoopDetectedEntry.setStatus('current')
sw0657840LoopDetectedfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 13, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840LoopDetectedfIndex.setStatus('current')
sw0657840LoopDetectedStateEbl = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 13, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840LoopDetectedStateEbl.setStatus('current')
sw0657840LoopDetectedCurrentStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 13, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840LoopDetectedCurrentStatus.setStatus('current')
sw0657840LoopDetectedResumed = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 13, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840LoopDetectedResumed.setStatus('current')
sw0657840LoopDetectedAction = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 13, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 1), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sw0657840LoopDetectedAction.setStatus('current')
sw0657840SFPInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 14))
sw0657840SFPInfoNumber = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 14, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SFPInfoNumber.setStatus('current')
sw0657840SFPInfoTable = MibTable((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 14, 2), )
if mibBuilder.loadTexts: sw0657840SFPInfoTable.setStatus('current')
sw0657840SFPInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 14, 2, 1), ).setIndexNames((0, "PRIVATE-SW0657840-MIB", "sw0657840SFPInfoIndex"))
if mibBuilder.loadTexts: sw0657840SFPInfoEntry.setStatus('current')
sw0657840SFPInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 14, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SFPInfoIndex.setStatus('current')
sw0657840SFPConnectorType = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 14, 2, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SFPConnectorType.setStatus('current')
sw0657840SFPFiberType = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 14, 2, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SFPFiberType.setStatus('current')
sw0657840SFPWavelength = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 14, 2, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SFPWavelength.setStatus('current')
sw0657840SFPBaudRate = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 14, 2, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SFPBaudRate.setStatus('current')
sw0657840SFPVendorOUI = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 14, 2, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SFPVendorOUI.setStatus('current')
sw0657840SFPVendorName = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 14, 2, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SFPVendorName.setStatus('current')
sw0657840SFPVendorPN = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 14, 2, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SFPVendorPN.setStatus('current')
sw0657840SFPVendorRev = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 14, 2, 1, 9), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SFPVendorRev.setStatus('current')
sw0657840SFPVendorSN = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 14, 2, 1, 10), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SFPVendorSN.setStatus('current')
sw0657840SFPDateCode = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 14, 2, 1, 11), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SFPDateCode.setStatus('current')
sw0657840SFPTemperature = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 14, 2, 1, 12), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SFPTemperature.setStatus('current')
sw0657840SFPVcc = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 14, 2, 1, 13), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SFPVcc.setStatus('current')
sw0657840SFPTxBias = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 14, 2, 1, 14), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SFPTxBias.setStatus('current')
sw0657840SFPTxPWR = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 14, 2, 1, 15), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SFPTxPWR.setStatus('current')
sw0657840SFPRxPWR = MibTableColumn((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 14, 2, 1, 16), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sw0657840SFPRxPWR.setStatus('current')
sw0657840TrapEntry = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20))
sw0657840ModuleInserted = NotificationType((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20, 1)).setObjects(("IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: sw0657840ModuleInserted.setStatus('current')
sw0657840ModuleRemoved = NotificationType((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20, 2)).setObjects(("IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: sw0657840ModuleRemoved.setStatus('current')
sw0657840DualMediaSwapped = NotificationType((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20, 3)).setObjects(("IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: sw0657840DualMediaSwapped.setStatus('current')
sw0657840LoopDetected = NotificationType((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20, 5)).setObjects(("IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: sw0657840LoopDetected.setStatus('current')
sw0657840StpStateDisabled = NotificationType((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20, 100))
if mibBuilder.loadTexts: sw0657840StpStateDisabled.setStatus('current')
sw0657840StpStateEnabled = NotificationType((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20, 101))
if mibBuilder.loadTexts: sw0657840StpStateEnabled.setStatus('current')
sw0657840StpTopologyChanged = NotificationType((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20, 102)).setObjects(("IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: sw0657840StpTopologyChanged.setStatus('current')
sw0657840LacpStateDisabled = NotificationType((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20, 120)).setObjects(("IF-MIB", "ifIndex"), ("PRIVATE-SW0657840-MIB", "groupId"))
if mibBuilder.loadTexts: sw0657840LacpStateDisabled.setStatus('current')
sw0657840LacpStateEnabled = NotificationType((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20, 121)).setObjects(("IF-MIB", "ifIndex"), ("PRIVATE-SW0657840-MIB", "groupId"))
if mibBuilder.loadTexts: sw0657840LacpStateEnabled.setStatus('current')
sw0657840LacpPortAdded = NotificationType((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20, 123)).setObjects(("IF-MIB", "ifIndex"), ("PRIVATE-SW0657840-MIB", "actorkey"), ("PRIVATE-SW0657840-MIB", "partnerkey"))
if mibBuilder.loadTexts: sw0657840LacpPortAdded.setStatus('current')
sw0657840LacpPortTrunkFailure = NotificationType((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20, 124)).setObjects(("IF-MIB", "ifIndex"), ("PRIVATE-SW0657840-MIB", "actorkey"), ("PRIVATE-SW0657840-MIB", "partnerkey"))
if mibBuilder.loadTexts: sw0657840LacpPortTrunkFailure.setStatus('current')
sw0657840GvrpStateDisabled = NotificationType((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20, 140))
if mibBuilder.loadTexts: sw0657840GvrpStateDisabled.setStatus('current')
sw0657840GvrpStateEnabled = NotificationType((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20, 141))
if mibBuilder.loadTexts: sw0657840GvrpStateEnabled.setStatus('current')
sw0657840VlanStateDisabled = NotificationType((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20, 150))
if mibBuilder.loadTexts: sw0657840VlanStateDisabled.setStatus('current')
sw0657840VlanPortBaseEnabled = NotificationType((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20, 151))
if mibBuilder.loadTexts: sw0657840VlanPortBaseEnabled.setStatus('current')
sw0657840VlanTagBaseEnabled = NotificationType((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20, 152))
if mibBuilder.loadTexts: sw0657840VlanTagBaseEnabled.setStatus('current')
sw0657840VlanMetroModeEnabled = NotificationType((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20, 153)).setObjects(("PRIVATE-SW0657840-MIB", "uplink"))
if mibBuilder.loadTexts: sw0657840VlanMetroModeEnabled.setStatus('current')
sw0657840VlanDoubleTagEnabled = NotificationType((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20, 154))
if mibBuilder.loadTexts: sw0657840VlanDoubleTagEnabled.setStatus('current')
sw0657840UserLogin = NotificationType((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20, 200)).setObjects(("PRIVATE-SW0657840-MIB", "username"))
if mibBuilder.loadTexts: sw0657840UserLogin.setStatus('current')
sw0657840UserLogout = NotificationType((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 20, 201)).setObjects(("PRIVATE-SW0657840-MIB", "username"))
if mibBuilder.loadTexts: sw0657840UserLogout.setStatus('current')
sw0657840TrapVariable = MibIdentifier((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 21))
username = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 21, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: username.setStatus('current')
groupId = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 21, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: groupId.setStatus('current')
actorkey = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 21, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: actorkey.setStatus('current')
partnerkey = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 21, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: partnerkey.setStatus('current')
uplink = MibScalar((1, 3, 6, 1, 4, 1, 5205, 2, 9, 1, 21, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplink.setStatus('current')
mibBuilder.exportSymbols("PRIVATE-SW0657840-MIB", sw0657840Diagnostic=sw0657840Diagnostic, sw0657840SMSUserTable=sw0657840SMSUserTable, sw0657840PortConfFlwCtrl=sw0657840PortConfFlwCtrl, sw0657840SerialNumber=sw0657840SerialNumber, sw0657840DevicePort=sw0657840DevicePort, sw0657840SFPTxPWR=sw0657840SFPTxPWR, username=username, sw0657840PortConfTable=sw0657840PortConfTable, sw0657840PingTest=sw0657840PingTest, sw0657840Time=sw0657840Time, sw0657840PortStatusTable=sw0657840PortStatusTable, sw0657840SFPVendorRev=sw0657840SFPVendorRev, sw0657840ModuleInserted=sw0657840ModuleInserted, sw0657840AccountNumber=sw0657840AccountNumber, sw0657840ModuleRemoved=sw0657840ModuleRemoved, sw0657840SFPWavelength=sw0657840SFPWavelength, sw0657840PortStatusLink=sw0657840PortStatusLink, sw0657840BandwidthConfStormBW=sw0657840BandwidthConfStormBW, sw0657840UploadLog=sw0657840UploadLog, sw0657840EventSendSMS=sw0657840EventSendSMS, sw0657840PortConf=sw0657840PortConf, sw0657840DoImportConfig=sw0657840DoImportConfig, sw0657840PortConfSpdDpx=sw0657840PortConfSpdDpx, sw0657840Alarm=sw0657840Alarm, switch=switch, sw0657840EEPROMTest=sw0657840EEPROMTest, sw0657840LoopDetectedConf=sw0657840LoopDetectedConf, sw0657840LacpStateDisabled=sw0657840LacpStateDisabled, sw0657840BandwidthConfIngressBW=sw0657840BandwidthConfIngressBW, sw0657840TrapHostIndex=sw0657840TrapHostIndex, actorkey=actorkey, sw0657840ExternalLoopbackTest=sw0657840ExternalLoopbackTest, sw0657840MirroringPort=sw0657840MirroringPort, sw0657840RestoreDefault=sw0657840RestoreDefault, sw0657840SFPVendorOUI=sw0657840SFPVendorOUI, sw0657840AccountIndex=sw0657840AccountIndex, sw0657840MirroredPorts=sw0657840MirroredPorts, sw0657840AutoUploadLogState=sw0657840AutoUploadLogState, sw0657840DoAccountAdd=sw0657840DoAccountAdd, sw0657840BandwidthPortNumber=sw0657840BandwidthPortNumber, sw0657840RamSize=sw0657840RamSize, sw0657840EventIndex=sw0657840EventIndex, sw0657840Produces=sw0657840Produces, sw0657840FlashSize=sw0657840FlashSize, 
sw0657840EmailPassword=sw0657840EmailPassword, sw0657840SMSUsername=sw0657840SMSUsername, sw0657840Mirror=sw0657840Mirror, sw0657840Configuration=sw0657840Configuration, sw0657840BandwidthConfStormState=sw0657840BandwidthConfStormState, sw0657840NTPTimeSync=sw0657840NTPTimeSync, sw0657840LoopDetectedfIndex=sw0657840LoopDetectedfIndex, sw0657840TrapEntry=sw0657840TrapEntry, sw0657840SMSServer=sw0657840SMSServer, sw0657840MirrorMode=sw0657840MirrorMode, sw0657840SFPTemperature=sw0657840SFPTemperature, sw0657840UartTest=sw0657840UartTest, sw0657840GvrpStateEnabled=sw0657840GvrpStateEnabled, sw0657840TrapVariable=sw0657840TrapVariable, sw0657840SaveRestore=sw0657840SaveRestore, sw0657840PortConfNumber=sw0657840PortConfNumber, sw0657840PortStatusIndex=sw0657840PortStatusIndex, sw0657840AccountPassword=sw0657840AccountPassword, sw0657840PortStatusMedia=sw0657840PortStatusMedia, sw0657840SMSUserEntry=sw0657840SMSUserEntry, sw0657840DoExportConfig=sw0657840DoExportConfig, sw0657840Bandwidth=sw0657840Bandwidth, sw0657840TrapHostTable=sw0657840TrapHostTable, sw0657840SystemCurrentTime=sw0657840SystemCurrentTime, sw0657840EventEntry=sw0657840EventEntry, sw0657840EventSendTrap=sw0657840EventSendTrap, sw0657840LoopDetectedEntry=sw0657840LoopDetectedEntry, sw0657840SFPRxPWR=sw0657840SFPRxPWR, sw0657840MaxPktLenConfTable=sw0657840MaxPktLenConfTable, sw0657840LoopDetectedAction=sw0657840LoopDetectedAction, sw0657840ProductID=sw0657840ProductID, sw0657840VlanPortBaseEnabled=sw0657840VlanPortBaseEnabled, sw0657840LogIndex=sw0657840LogIndex, sw0657840UserLogin=sw0657840UserLogin, sw0657840SFPBaudRate=sw0657840SFPBaudRate, sw0657840Account=sw0657840Account, sw0657840TrapHostCommunity=sw0657840TrapHostCommunity, sw0657840LacpPortTrunkFailure=sw0657840LacpPortTrunkFailure, sw0657840EventTable=sw0657840EventTable, sw0657840LoopDetectedCurrentStatus=sw0657840LoopDetectedCurrentStatus, sw0657840FirmwareVersion=sw0657840FirmwareVersion, sw0657840ImportConfigName=sw0657840ImportConfigName, 
sw0657840PortStatusAutoNego=sw0657840PortStatusAutoNego, sw0657840MaxPktLen=sw0657840MaxPktLen, sw0657840DhcpSetting=sw0657840DhcpSetting, sw0657840Reboot=sw0657840Reboot, sw0657840SaveUser=sw0657840SaveUser, sw0657840PortStatusPortState=sw0657840PortStatusPortState, sw0657840BandwidthConfIngressState=sw0657840BandwidthConfIngressState, sw0657840SetCommunity=sw0657840SetCommunity, sw0657840VlanDoubleTagEnabled=sw0657840VlanDoubleTagEnabled, sw0657840Log=sw0657840Log, sw0657840DoFirmwareUpgrade=sw0657840DoFirmwareUpgrade, sw0657840HostMacAddress=sw0657840HostMacAddress, sw0657840FirmwareFileName=sw0657840FirmwareFileName, sw0657840LoopDetectedTable=sw0657840LoopDetectedTable, sw0657840ManualTimeSetting=sw0657840ManualTimeSetting, sw0657840LogEvent=sw0657840LogEvent, sw0657840ClearLog=sw0657840ClearLog, sw0657840EventName=sw0657840EventName, sw0657840PortStatuDescription=sw0657840PortStatuDescription, sw0657840NTPTimeZone=sw0657840NTPTimeZone, sw0657840LacpStateEnabled=sw0657840LacpStateEnabled, sw0657840SMSUserMobilePhone=sw0657840SMSUserMobilePhone, sw0657840StpStateEnabled=sw0657840StpStateEnabled, sw0657840VlanTagBaseEnabled=sw0657840VlanTagBaseEnabled, sw0657840DnsServer=sw0657840DnsServer, sw0657840LogTable=sw0657840LogTable, sw0657840MaxPktLen1=sw0657840MaxPktLen1, sw0657840EmailUserIndex=sw0657840EmailUserIndex, sw0657840ConfigFile=sw0657840ConfigFile, sw0657840LoopDetected=sw0657840LoopDetected, sw0657840EmailUserTable=sw0657840EmailUserTable, sw0657840DaylightEndTime=sw0657840DaylightEndTime, sw0657840SaveStart=sw0657840SaveStart, sw0657840SMS=sw0657840SMS, sw0657840DnsSetting=sw0657840DnsSetting, sw0657840Snmp=sw0657840Snmp, sw0657840IPAddress=sw0657840IPAddress, sw0657840DefaultGateway=sw0657840DefaultGateway, sw0657840AccountDel=sw0657840AccountDel, sw0657840EmailUserNumber=sw0657840EmailUserNumber, sw0657840AccountAddPassword=sw0657840AccountAddPassword, sw0657840SFPVendorSN=sw0657840SFPVendorSN, sw0657840EmailUserAddress=sw0657840EmailUserAddress, 
sw0657840NTPServer=sw0657840NTPServer, sw0657840EventNumber=sw0657840EventNumber, sw0657840BandwidthConfEntry=sw0657840BandwidthConfEntry, sw0657840LoopDetectedStateEbl=sw0657840LoopDetectedStateEbl, sw0657840GvrpStateDisabled=sw0657840GvrpStateDisabled, sw0657840AccountName=sw0657840AccountName, sw0657840EmailUsername=sw0657840EmailUsername, sw0657840DramTest=sw0657840DramTest, sw0657840FlashTest=sw0657840FlashTest, sw0657840Tftp=sw0657840Tftp, sw0657840RestoreUser=sw0657840RestoreUser, sw0657840EmailUserEntry=sw0657840EmailUserEntry, sw0657840VlanMetroModeEnabled=sw0657840VlanMetroModeEnabled, sw0657840SFPInfoIndex=sw0657840SFPInfoIndex, sw0657840PortStatusFlwCtrl=sw0657840PortStatusFlwCtrl, partnerkey=partnerkey, sw0657840DualMediaSwapped=sw0657840DualMediaSwapped, sw0657840SFPInfoNumber=sw0657840SFPInfoNumber, sw0657840Email=sw0657840Email, sw0657840MaxPktLenPortNumber=sw0657840MaxPktLenPortNumber, sw0657840LoopDetectedNumber=sw0657840LoopDetectedNumber, groupId=groupId, sw0657840SFPInfo=sw0657840SFPInfo, sw0657840PortStatus=sw0657840PortStatus, sw0657840TrapHostIP=sw0657840TrapHostIP, sw0657840TftpServer=sw0657840TftpServer, sw0657840Event=sw0657840Event, sw0657840BandwidthConfEgressBW=sw0657840BandwidthConfEgressBW, sw0657840SFPFiberType=sw0657840SFPFiberType, sw0657840BiosVsersion=sw0657840BiosVsersion, sw0657840Bandwidth1=sw0657840Bandwidth1, sw0657840ExportConfigName=sw0657840ExportConfigName, sw0657840SFPInfoEntry=sw0657840SFPInfoEntry, sw0657840SFPInfoTable=sw0657840SFPInfoTable, sw0657840AccountAddName=sw0657840AccountAddName, sw0657840TrapHostPort=sw0657840TrapHostPort, sw0657840TrapHostEntry=sw0657840TrapHostEntry, sw0657840CommonSys=sw0657840CommonSys, privatetech=privatetech, sw0657840VlanStateDisabled=sw0657840VlanStateDisabled, sw0657840DaylightSavingTime=sw0657840DaylightSavingTime, sw0657840AccountTable=sw0657840AccountTable, sw0657840BandwidthConfTable=sw0657840BandwidthConfTable, sw0657840LoopDetectedResumed=sw0657840LoopDetectedResumed, 
sw0657840SMSUserIndex=sw0657840SMSUserIndex, sw0657840StpStateDisabled=sw0657840StpStateDisabled, sw0657840PortStatusEntry=sw0657840PortStatusEntry, sw0657840SMSUserNumber=sw0657840SMSUserNumber, sw0657840BandwidthConfEgressState=sw0657840BandwidthConfEgressState, sw0657840LogEntry=sw0657840LogEntry, sw0657840MechanicalVersion=sw0657840MechanicalVersion, sw0657840NetMask=sw0657840NetMask, sw0657840GetCommunity=sw0657840GetCommunity, sw0657840SFPConnectorType=sw0657840SFPConnectorType, sw0657840TrapHostNumber=sw0657840TrapHostNumber, sw0657840SFPVcc=sw0657840SFPVcc, sw0657840LogNumber=sw0657840LogNumber, sw0657840LacpPortAdded=sw0657840LacpPortAdded, sw0657840SFPVendorPN=sw0657840SFPVendorPN, sw0657840UserLogout=sw0657840UserLogout, sw0657840EmailServer=sw0657840EmailServer, sw0657840EventSendEmail=sw0657840EventSendEmail, sw0657840Port=sw0657840Port, sw0657840IP=sw0657840IP, sw0657840HardwareVersion=sw0657840HardwareVersion, sw0657840MaxPktLenConfEntry=sw0657840MaxPktLenConfEntry, sw0657840DaylightStartTime=sw0657840DaylightStartTime, sw0657840SFPVendorName=sw0657840SFPVendorName, sw0657840PortConfIndex=sw0657840PortConfIndex, sw0657840PortConfDescription=sw0657840PortConfDescription, sw0657840BandwidthConfIndex=sw0657840BandwidthConfIndex, sw0657840PortStatusNumber=sw0657840PortStatusNumber, sw0657840AccountEntry=sw0657840AccountEntry, sw0657840StpTopologyChanged=sw0657840StpTopologyChanged, sw0657840InternalLoopbackTest=sw0657840InternalLoopbackTest, sw0657840MaxPktLenConfSetting=sw0657840MaxPktLenConfSetting, sw0657840SFPTxBias=sw0657840SFPTxBias, sw0657840PortConfEntry=sw0657840PortConfEntry, sw0657840MaxPktLenConfIndex=sw0657840MaxPktLenConfIndex, uplink=uplink, sw0657840PortStatusSpdDpx=sw0657840PortStatusSpdDpx, sw0657840PortConfPortState=sw0657840PortConfPortState, sw0657840System=sw0657840System, sw0657840Firmware=sw0657840Firmware, PYSNMP_MODULE_ID=privatetech, sw0657840SMSPassword=sw0657840SMSPassword, sw0657840SFPDateCode=sw0657840SFPDateCode, 
sw0657840AccountAuthorization=sw0657840AccountAuthorization)
| 129.882494 | 10,027 | 0.773342 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5,080 | 0.093794 |
2dedbb9bc8cf45b074be9e76b40b810632cf828b | 14,005 | py | Python | simpleference/inference/util.py | neptunes5thmoon/simpleference | 96c74187bd3d83f6f1e910e72e56f45d6cc8d5d9 | [
"MIT"
] | null | null | null | simpleference/inference/util.py | neptunes5thmoon/simpleference | 96c74187bd3d83f6f1e910e72e56f45d6cc8d5d9 | [
"MIT"
] | null | null | null | simpleference/inference/util.py | neptunes5thmoon/simpleference | 96c74187bd3d83f6f1e910e72e56f45d6cc8d5d9 | [
"MIT"
] | null | null | null | from __future__ import print_function
try:
import h5py
WITH_H5PY = True
except ImportError:
WITH_H5PY = False
try:
import zarr
WITH_ZARR = True
from .io import IoZarr
except ImportError:
WITH_ZARR = False
try:
import z5py
WITH_Z5PY = True
from .io import IoN5
except ImportError:
WITH_Z5PY = False
import os
import json
from random import shuffle
import numpy as np
import re
import fnmatch
from .inference import load_input_crop
import dask
import toolz as tz
import logging
def _offset_list(shape, output_shape):
in_list = []
for z in np.arange(0, shape[0], output_shape[0]):
for y in np.arange(0, shape[1], output_shape[1]):
for x in np.arange(0, shape[2], output_shape[2]):
in_list.append([float(z), float(y), float(x)])
return in_list
# NOTE this will not cover the whole volume
def _offset_list_with_shift(shape, output_shape, shift):
in_list = []
for z in np.arange(0, shape[0], output_shape[0]):
for y in np.arange(0, shape[1], output_shape[1]):
for x in np.arange(0, shape[2], output_shape[2]):
in_list.append([min(float(z) + shift[0], shape[0]),
min(float(y) + shift[1], shape[1]),
min(float(x) + shift[2], shape[2])])
return in_list
# this returns the offsets for the given output blocks.
# blocks are padded on the fly during inference if necessary
def get_offset_lists(shape,
                     gpu_list,
                     save_folder,
                     output_shape,
                     randomize=False,
                     shift=None):
    """Split the block grid covering `shape` across GPUs and write one JSON
    offset list per GPU into `save_folder`.

    Parameters
    ----------
    shape: total volume shape (same units as `output_shape`).
    gpu_list: gpu ids; one 'list_gpu_<id>.json' file is written per id.
    save_folder: target directory, created (including parents) if missing.
    output_shape: shape of one output block; defines the grid.
    randomize: if True, shuffle the block order before splitting.
    shift: optional per-axis shift applied to every offset (clamped to
        `shape`); NOTE the shifted grid does not cover the whole volume.
    """
    if shift is None:
        in_list = _offset_list(shape, output_shape)
    else:
        in_list = _offset_list_with_shift(shape, output_shape, shift)
    if randomize:
        shuffle(in_list)
    # round-robin split so each gpu receives a near-equal number of blocks
    n_splits = len(gpu_list)
    out_list = [in_list[i::n_splits] for i in range(n_splits)]
    # makedirs(exist_ok=True) also creates missing parent directories and
    # avoids the check-then-create race of os.path.exists + os.mkdir
    os.makedirs(save_folder, exist_ok=True)
    for ii, olist in enumerate(out_list):
        list_name = os.path.join(save_folder, 'list_gpu_%i.json' % gpu_list[ii])
        with open(list_name, 'w') as f:
            json.dump(olist, f)
# this returns the offsets for the given output blocks and bounding box.
# blocks are padded on the fly during inference if necessary
def get_offset_lists_with_bb(shape,
                             gpu_list,
                             save_folder,
                             output_shape,
                             bb_start,
                             bb_stop,
                             randomize=False):
    """Write per-GPU JSON offset lists covering the bounding box
    [`bb_start`, `bb_stop`].

    The bounding box is snapped outward to the block grid defined by
    `output_shape`: the start is rounded down to a grid line and the stop is
    rounded up, so every block intersecting the box is included.

    Parameters
    ----------
    shape: total volume shape (unused for the grid itself, kept for
        interface symmetry with `get_offset_lists`).
    gpu_list: gpu ids; one 'list_gpu_<id>.json' file is written per id.
    save_folder: target directory, created (including parents) if missing.
    output_shape: shape of one output block; defines the grid.
    bb_start, bb_stop: inclusive start / exclusive stop of the bounding box.
    randomize: if True, shuffle the block order before splitting.
    """
    # snap the bounding box to the grid defined by the output blocks
    bb_start_c = [(bbs // outs) * outs for bbs, outs in zip(bb_start, output_shape)]
    bb_stop_c = [(bbs // outs + 1) * outs for bbs, outs in zip(bb_stop, output_shape)]
    in_list = []
    for z in range(bb_start_c[0], bb_stop_c[0], output_shape[0]):
        for y in range(bb_start_c[1], bb_stop_c[1], output_shape[1]):
            for x in range(bb_start_c[2], bb_stop_c[2], output_shape[2]):
                in_list.append([z, y, x])
    if randomize:
        shuffle(in_list)
    # round-robin split so each gpu receives a near-equal number of blocks
    n_splits = len(gpu_list)
    out_list = [in_list[i::n_splits] for i in range(n_splits)]
    # makedirs(exist_ok=True) also creates missing parent directories and
    # avoids the check-then-create race of os.path.exists + os.mkdir
    os.makedirs(save_folder, exist_ok=True)
    for ii, olist in enumerate(out_list):
        list_name = os.path.join(save_folder, 'list_gpu_%i.json' % gpu_list[ii])
        with open(list_name, 'w') as f:
            json.dump(olist, f)
# redistributing offset lists from failed jobs
def redistribute_offset_lists(gpu_list, save_folder):
    """Rebuild per-GPU offset lists from what a partially failed run left behind.

    Reads all 'list_gpu_<id>.json' files (originally assigned blocks) and all
    'list_gpu_<id>_<tag>_processed.txt' files (blocks actually completed),
    renames both sets to '*_try<n>' backups and writes fresh
    'list_gpu_<id>.json' files containing only the still-unprocessed blocks,
    split round-robin over `gpu_list`.
    """
    # raw strings + escaped dots: '\d' in a plain string literal is an invalid
    # escape (DeprecationWarning today, SyntaxError in future Pythons) and an
    # unescaped '.' matched any character instead of the literal dot
    p_full = re.compile(r"list_gpu_\d+\.json")
    p_proc = re.compile(r"list_gpu_\d+_\S*_processed\.txt")
    full_list_jsons = []
    processed_list_files = []
    for f in os.listdir(save_folder):
        if p_full.match(f) is not None:
            full_list_jsons.append(f)
        if p_proc.match(f) is not None:
            processed_list_files.append(f)
    # union of all blocks that were originally assigned
    full_block_list = set()
    for fl in full_list_jsons:
        with open(os.path.join(save_folder, fl), 'r') as f:
            bl = json.load(f)
            full_block_list.update({tuple(coo) for coo in bl})
    # union of all completed blocks; a processed file is a comma-separated
    # stream of coordinate lists, so truncate after the last ']' and wrap in
    # brackets to obtain valid json
    processed_block_list = set()
    bls = []
    for pl in processed_list_files:
        with open(os.path.join(save_folder, pl), 'r') as f:
            bl_txt = f.read()
            bl_txt = '[' + bl_txt[:bl_txt.rfind(']') + 1] + ']'
            bls.append(json.loads(bl_txt))
            processed_block_list.update({tuple(coo) for coo in bls[-1]})
    to_be_processed_block_list = list(full_block_list - processed_block_list)
    # determine the next free try number for the backup files
    previous_tries = []
    p_tries = re.compile(r"list_gpu_\d+_try\d+\.json")
    for f in os.listdir(save_folder):
        if p_tries.match(f) is not None:
            previous_tries.append(f)
    if len(previous_tries) == 0:
        tryno = 0
    else:
        trynos = []
        for tr in previous_tries:
            trynos.append(int(tr.split('try')[1].split('.json')[0]))
        tryno = max(trynos) + 1
    print('Backing up last try ({0:})'.format(tryno))
    for f in full_list_jsons:
        os.rename(os.path.join(save_folder, f),
                  os.path.join(save_folder, f[:-5] + '_try{0:}.json'.format(tryno)))
    for f in processed_list_files:
        os.rename(os.path.join(save_folder, f),
                  os.path.join(save_folder, f[:-4] + '_try{0:}.txt'.format(tryno)))
    # round-robin redistribution of the remaining blocks
    n_splits = len(gpu_list)
    out_list = [to_be_processed_block_list[i::n_splits] for i in range(n_splits)]
    for ii, olist in enumerate(out_list):
        if len(olist) > 0:
            list_name = os.path.join(save_folder, 'list_gpu_%i.json' % gpu_list[ii])
            with open(list_name, 'w') as f:
                json.dump(olist, f)
def load_ds(path, key):
    """Open the dataset ``key`` in the container at ``path``.

    Supports HDF5 (h5py) and zarr/N5 (zarr or z5py) containers, selected by
    file extension.  The backing file handle is intentionally kept open so the
    returned dataset remains readable by the caller.

    Args:
        path: Path to a ``.h5``/``.hdf``/``.hdf5`` or ``.zr``/``.zarr``/``.n5``
            container.
        key: Name of the dataset inside the container.

    Returns:
        The (lazy) dataset object.

    Raises:
        ValueError: If the extension matches no supported backend.
    """
    ext = os.path.splitext(path)[-1]
    # Fixed extension tuple: the original listed '.hdf' twice so '.hdf5'
    # files were never recognized.
    if ext.lower() in ('.h5', '.hdf', '.hdf5'):
        assert WITH_H5PY
        # No context manager here: closing the file would invalidate the
        # returned h5py dataset handle before the caller could read it.
        f = h5py.File(path, 'r')
        ds = f[key]
    elif ext.lower() in ('.zr', '.zarr', '.n5'):
        assert WITH_Z5PY or WITH_ZARR
        if WITH_ZARR:
            f = zarr.open(path)
            ds = f[key]
        elif WITH_Z5PY:
            # Same reasoning as above: keep the z5py file open.
            f = z5py.File(path)
            ds = f[key]
    else:
        # Previously fell through and raised UnboundLocalError on `ds`.
        raise ValueError("Unsupported container extension: {0:}".format(ext))
    return ds
def generate_list_for_mask(offset_file_json, output_shape_wc, path, mask_ds, n_cpus, mask_voxel_size=None):
    """Write to ``offset_file_json`` the block offsets whose mask crop is non-empty.

    Loads the mask dataset, tiles it into blocks of ``output_shape_wc``
    (world coordinates), evaluates each block lazily with dask across
    ``n_cpus`` threads, and keeps only offsets whose crop contains at least
    one non-zero mask value.
    """
    mask = load_ds(path, mask_ds)
    if mask_voxel_size is None:
        # Resolve the voxel size from dataset attributes; fall back to 1s.
        if "pixelResolution" in mask.attrs:
            mask_voxel_size = mask.attrs["pixelResolution"]["dimensions"]
        elif "resolution" in mask.attrs:
            mask_voxel_size = mask.attrs["resolution"]
        else:
            mask_voxel_size = (1,) * len(output_shape_wc)
            logging.warning("Did not find resolution information in attributes, defaulting to {0:}".format(mask_voxel_size))
    # Dataset shape in world coordinates.
    shape_wc = tuple(np.array(mask.shape) * np.array(mask_voxel_size))
    complete_offset_list = _offset_list(shape_wc, output_shape_wc)
    # NOTE(review): this picks the N5 io whenever z5py is importable, even
    # for zarr-extension paths -- confirm the intended backend priority.
    if WITH_Z5PY:
        io = IoN5(path, mask_ds, voxel_size=mask_voxel_size, channel_order=None)
    else:
        io = IoZarr(path, mask_ds, voxel_size=mask_voxel_size, channel_order=None)

    @dask.delayed()
    def load_offset(offset_wc):
        # Crop one output-sized block of the mask at the given world offset.
        return load_input_crop(io, offset_wc, (0,) * len(output_shape_wc), output_shape_wc, padding_mode="constant")[0]

    @dask.delayed()
    def evaluate_mask(mask_block):
        # True when the cropped block contains any non-zero mask voxel.
        if np.sum(mask_block) > 0:
            return True
        else:
            return False

    # Build the lazy load->evaluate pipeline per offset, then compute all at once.
    offsets_mask_eval = []
    for offset_wc in complete_offset_list:
        keep_offset = tz.pipe(offset_wc, load_offset, evaluate_mask)
        offsets_mask_eval.append((offset_wc, keep_offset))
    offsets_mask_eval = dask.compute(*offsets_mask_eval, scheduler="threads", num_workers=n_cpus)
    offsets_in_mask = []
    for o, m in offsets_mask_eval:
        if m:
            offsets_in_mask.append(o)
    logging.info("{0:}/{1:} blocks contained in mask, saving offsets in {2:}".format(len(offsets_in_mask),
                                                                                     len(complete_offset_list),
                                                                                     offset_file_json))
    with open(offset_file_json, 'w') as f:
        json.dump(offsets_in_mask, f)
def generate_full_list(offset_file_json, output_shape_wc, path, raw_ds, raw_voxel_size=None):
    """Write the complete list of block offsets covering ``raw_ds`` to ``offset_file_json``.

    The voxel size is taken from the dataset attributes unless given
    explicitly; without either, isotropic unit voxels are assumed.
    """
    raw = load_ds(path, raw_ds)
    if raw_voxel_size is None:
        dataset_attrs = raw.attrs
        if "pixelResolution" in dataset_attrs:
            raw_voxel_size = dataset_attrs["pixelResolution"]["dimensions"]
        elif "resolution" in dataset_attrs:
            raw_voxel_size = dataset_attrs["resolution"]
        else:
            # No resolution metadata at all: assume unit voxels and warn.
            raw_voxel_size = (1,) * len(output_shape_wc)
            logging.warning("Did not find resolution information in attributes, defaulting to {0:}".format(raw_voxel_size))
    # Convert the voxel-grid shape into world coordinates before tiling.
    shape_wc = tuple(np.array(raw.shape) * np.array(raw_voxel_size))
    all_offsets = _offset_list(shape_wc, output_shape_wc)
    with open(offset_file_json, "w") as sink:
        json.dump(all_offsets, sink)
# this returns the offsets for the given output blocks.
# blocks are padded on the fly in the inference if necessary
def offset_list_from_precomputed(input_list,
                                 gpu_list,
                                 save_folder,
                                 list_name_extension='',
                                 randomize=False):
    """Split a precomputed offset list round-robin over GPUs, one JSON file per GPU.

    Args:
        input_list: Either the list of offsets itself or the path of a JSON
            file containing it.
        gpu_list: GPU ids; one output file is written per id.
        save_folder: Target directory (created if missing).
        list_name_extension: Optional suffix inserted into each file name.
        randomize: Shuffle the offsets in place before splitting.
    """
    if isinstance(input_list, str):
        # A string argument is the path of a JSON file holding the list.
        with open(input_list, 'r') as src:
            input_list = json.load(src)
    else:
        assert isinstance(input_list, list)
    if randomize:
        shuffle(input_list)
    if not os.path.exists(save_folder):
        os.mkdir(save_folder)
    print("Original len", len(input_list))
    n_splits = len(gpu_list)
    for idx in range(n_splits):
        # Round-robin slice: element idx, idx+n, idx+2n, ...
        chunk = input_list[idx::n_splits]
        target = os.path.join(save_folder, 'list_gpu_{0:}{1:}.json'.format(gpu_list[idx], list_name_extension))
        print("Dumping list number", idx, "of len", len(chunk))
        with open(target, 'w') as sink:
            json.dump(chunk, sink)
def stitch_prediction_blocks(save_path,
                             block_folder,
                             shape,
                             key='data',
                             end_channel=None,
                             n_workers=8,
                             chunks=(1, 64, 64, 64)):
    """Assemble per-block prediction HDF5 files into one dataset of ``shape``.

    Each file named ``block_<z>_<y>_<x>.h5`` in ``block_folder`` is copied
    into the output dataset at the offsets encoded in its file name.
    Optionally only the first ``end_channel`` channels are written.
    """
    from concurrent import futures
    if end_channel is None:
        chan_slice = (slice(None),)
    else:
        assert end_channel <= shape[0]
        chan_slice = (slice(0, end_channel),)

    def stitch_block(ds, block_id, block_file, n_blocks):
        # Copy one block file into the output dataset at its encoded offset.
        print("Stitching block %i / %i" % (block_id, n_blocks))
        # File name pattern: block_<off0>_<off1>_... .h5 -> spatial offsets.
        offsets = [int(off) for off in block_file[:-3].split('_')[1:]]
        with h5py.File(os.path.join(block_folder, block_file), 'r') as g:
            block_data = g['data'][:]
        block_shape = block_data.shape[1:]
        # Need to add slice for channel dimension
        bb = chan_slice + tuple(slice(off, off + block_shape[ii])
                                for ii, off in enumerate(offsets))
        ds[bb] = block_data

    with h5py.File(save_path, 'w') as f:
        ds = f.create_dataset(key,
                              shape=shape,
                              dtype='float32',
                              compression='gzip',
                              chunks=chunks)
        files = os.listdir(block_folder)
        # filter out invalid filenames
        files = [ff for ff in files if ff.startswith('block')]
        # make sure all blocks are h5 files
        assert all(ff[-3:] == '.h5' for ff in files)
        n_blocks = len(files)
        # NOTE(review): all worker threads write into the same open h5py
        # dataset; h5py is not thread-safe without external locking -- confirm
        # this relies on non-overlapping blocks / the GIL in practice.
        with futures.ThreadPoolExecutor(max_workers=n_workers) as tp:
            tasks = [tp.submit(stitch_block, ds, block_id, block_file, n_blocks)
                     for block_id, block_file in enumerate(files)]
            # Collect results so worker exceptions propagate to the caller.
            [t.result() for t in tasks]
def extract_nn_affinities(save_prefix,
                          block_folder,
                          shape,
                          invert_affs=False):
    """Extract nearest-neighbor affinities from block files into two HDF5 volumes.

    Writes ``<save_prefix>_xy.h5`` (mean of the two in-plane affinity
    channels) and ``<save_prefix>_z.h5`` (the first, across-plane channel),
    each of the given ``shape``.  Offsets are decoded from the block file
    names, as in ``stitch_prediction_blocks``.
    """
    from concurrent import futures
    save_path_xy = save_prefix + '_xy.h5'
    save_path_z = save_prefix + '_z.h5'
    with h5py.File(save_path_xy, 'w') as f_xy, h5py.File(save_path_z, 'w') as f_z:
        ds_xy = f_xy.create_dataset('data',
                                    shape=shape,
                                    dtype='float32',
                                    compression='gzip',
                                    chunks=(56, 56, 56))
        ds_z = f_z.create_dataset('data',
                                  shape=shape,
                                  dtype='float32',
                                  compression='gzip',
                                  chunks=(56, 56, 56))
        files = os.listdir(block_folder)

        def extract_block(i, ff):
            # Read the first three channels of one block and distribute them
            # into the z / xy output volumes at the offset from the file name.
            print("Stitching block %i / %i" % (i, len(files)))
            offsets = [int(off) for off in ff[:-3].split('_')[1:]]
            with h5py.File(os.path.join(block_folder, ff), 'r') as g:
                block_data = g['data'][:3]
            if invert_affs:
                block_data = 1. - block_data
            block_shape = block_data.shape[1:]
            # Need to add slice for channel dimension
            bb = tuple(slice(off, off + block_shape[ii]) for ii, off in enumerate(offsets))
            # Channels 1 and 2 are averaged into the in-plane (xy) volume.
            ds_xy[bb] = (block_data[1] + block_data[2]) / 2.
            ds_z[bb] = block_data[0]

        # NOTE(review): as in stitch_prediction_blocks, many threads write to
        # the same open h5py datasets -- confirm thread-safety assumptions.
        with futures.ThreadPoolExecutor(max_workers=20) as tp:
            tasks = []
            for i, ff in enumerate(files):
                if not ff.startswith('block'):
                    continue
                assert ff[-3:] == '.h5'
                tasks.append(tp.submit(extract_block, i, ff))
            # Collect results so worker exceptions propagate to the caller.
            [t.result() for t in tasks]
def reject_empty_batch(data):
    """Return True when the values of ``data`` sum to zero (treated as an empty batch)."""
    total = np.sum(data)
    return total == 0
| 37.95393 | 124 | 0.57865 | 0 | 0 | 0 | 0 | 315 | 0.022492 | 0 | 0 | 1,538 | 0.109818 |
2def07ac71914db60c5520bf17ab094d2838a077 | 481 | py | Python | python/ds/MinDiffList.py | unhingedporter/DataStructureMustKnow | 3c5b3225afa2775d37a2ff90121f73208717640a | [
"MIT"
] | 3 | 2019-11-23T08:43:58.000Z | 2019-11-23T08:52:53.000Z | python/ds/MinDiffList.py | unhingedpotter/DSMustKnow | 64958cbbbb3f4cdb1104c2255e555233554503f9 | [
"MIT"
] | null | null | null | python/ds/MinDiffList.py | unhingedpotter/DSMustKnow | 64958cbbbb3f4cdb1104c2255e555233554503f9 | [
"MIT"
] | null | null | null | class MinDiffList:
# def __init__(self):
# self.diff = sys.maxint
def findMinDiff(self, arr):
arr.sort()
self.diff = arr[len(arr) - 1]
for iter in range(len(arr)):
adjacentDiff = abs(arr[iter + 1]) - abs(arr[iter])
if adjacentDiff < self.diff:
self.diff = adjacentDiff
return adjacentDiff
# Demo: exercise MinDiffList on a mixed positive/negative list.
# NOTE(review): with this input, findMinDiff's loop indexes arr[iter + 1] on
# its final iteration and raises IndexError -- confirm intended usage.
findDiff = MinDiffList()
print(findDiff.findMinDiff([1, 2, 3, 4, 5, 888, 100, 120, -5, 0.8]))
| 24.05 | 68 | 0.555094 | 384 | 0.798337 | 0 | 0 | 0 | 0 | 0 | 0 | 49 | 0.101871 |
2defa74f0ad5c7fdce73289dd8fcc91dc1f25cf7 | 2,075 | py | Python | ncbi/patric_add_taxonomy.py | johned0/EdwardsLab | ae0d8b51a579cd009b414d11224b4110ba13af66 | [
"MIT"
] | null | null | null | ncbi/patric_add_taxonomy.py | johned0/EdwardsLab | ae0d8b51a579cd009b414d11224b4110ba13af66 | [
"MIT"
] | null | null | null | ncbi/patric_add_taxonomy.py | johned0/EdwardsLab | ae0d8b51a579cd009b414d11224b4110ba13af66 | [
"MIT"
] | null | null | null | """
Add the taxonomy to the patric metadata file
"""
import os
import sys
import argparse
from taxon import get_taxonomy_db, get_taxonomy
# Shared connection/handle to the taxonomy database, used by get_taxonomy().
c = get_taxonomy_db()

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Append taxonomy to the patric metadata file. This adds it at column 67")
    parser.add_argument('-f', help='patric metadata file', required=True)
    parser.add_argument('-o', help='output file', required=True)
    parser.add_argument('-c', help='taxonomy ID column', required=True, type=int)
    parser.add_argument('-t', help='taxonomy directory (or we will use default)')
    parser.add_argument('-v', help='verbose output', action="store_true")
    args = parser.parse_args()

    # Taxonomic ranks to append, in output column order.
    want = ['superkingdom', 'phylum', 'class', 'order', 'family', 'genus', 'species']

    # read the file once to figure out the longest line
    maxp = 0
    with open(args.f, 'r', encoding='utf-8') as f:
        for l in f:
            p = l.strip().split("\t")
            if len(p) > maxp:
                maxp = len(p)

    with open(args.o, 'w', encoding='utf-8') as out:
        with open(args.f, 'r', encoding='utf-8') as f:
            for l in f:
                p = l.strip().split("\t")
                # Pad short rows so every row has the same column count
                # before the taxonomy columns are appended.
                while (len(p) < maxp):
                    p.append("")
                if l.startswith("genome_id"):
                    # Header row: append the rank names instead of values.
                    out.write("{}\t{}\n".format(l.strip(), "\t".join(want)))
                    continue
                tid = p[args.c]
                level = {}
                # Walk up the taxonomy tree, collecting the wanted ranks.
                t, n = get_taxonomy(tid, c)
                while t and t.parent > 1 and t.parent != 131567:
                    # 131567 is cellular organisms
                    if t.rank in want:
                        level[t.rank] = n.scientific_name
                    t, n = get_taxonomy(t.parent, c)
                # Append each wanted rank (blank when missing for this taxon).
                for w in want:
                    if w in level:
                        p.append(level[w])
                    else:
                        p.append("")
                out.write("\t".join(map(str, p)))
                out.write("\n")
| 33.467742 | 122 | 0.517108 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 498 | 0.24 |
2df494efc476f1235a7fd4bca3824c402be41058 | 11,450 | py | Python | project/save_FlowFile_BPFormat.py | wesleybowman/karsten | ef4b2d6debae605902d76cd0484e71c0ba74fdd1 | [
"MIT"
] | 1 | 2015-05-04T17:48:56.000Z | 2015-05-04T17:48:56.000Z | project/save_FlowFile_BPFormat.py | wesleybowman/karsten | ef4b2d6debae605902d76cd0484e71c0ba74fdd1 | [
"MIT"
] | null | null | null | project/save_FlowFile_BPFormat.py | wesleybowman/karsten | ef4b2d6debae605902d76cd0484e71c0ba74fdd1 | [
"MIT"
] | 1 | 2021-11-15T17:53:19.000Z | 2021-11-15T17:53:19.000Z | from __future__ import division
import numpy as np
from rawADCPclass import rawADCP
from datetime import datetime
from datetime import timedelta
import scipy.io as sio
import scipy.interpolate as sip
import matplotlib.pyplot as plt
import seaborn
def date2py(matlab_datenum):
    """Convert a MATLAB datenum (float days) into a Python ``datetime``."""
    whole_days = int(matlab_datenum)
    day_fraction = timedelta(days=matlab_datenum % 1)
    # MATLAB datenums count from year 0 while Python ordinals start at
    # 0001-01-01, hence the fixed 366-day shift.
    return datetime.fromordinal(whole_days) + day_fraction - timedelta(days=366)
def py2date(dt):
    """Convert a Python ``datetime`` back into a MATLAB datenum (float days)."""
    # Shift into MATLAB's epoch (see date2py for the 366-day offset).
    shifted = dt + timedelta(days=366)
    seconds_into_day = (dt - datetime(dt.year, dt.month, dt.day, 0, 0, 0)).seconds
    day_fraction = seconds_into_day / (24.0 * 60.0 * 60.0)
    micro_fraction = dt.microsecond / (24.0 * 60.0 * 60.0 * 1000000.0)
    return shifted.toordinal() + day_fraction + micro_fraction
def calc_ensemble(x, ens, ens_dim):
    """Ensemble-average a 1-D series by interleaved grouping.

    Args:
        x: 1-D array of samples.
        ens: Number of interleaved members per ensemble.
        ens_dim: Only ``1`` (grouping along the first axis) is implemented.

    Returns:
        1-D array of ensemble means (NaNs are ignored by the mean).

    Raises:
        NotImplementedError: For any ``ens_dim`` other than 1 (the original
            silently fell through to a NameError).
    """
    ens = int(ens)
    if ens_dim != 1:
        raise NotImplementedError("only ens_dim == 1 is supported")
    # NOTE(review): the group count is hard-coded to 60 samples rather than
    # derived from ``ens`` -- confirm this is intended for ens != 60.
    # int() is required: modern NumPy rejects float shapes in np.empty.
    ens_size = int(np.floor(x.shape[0] / 60))
    x_ens = np.empty((ens_size, ens))
    x_ens[:] = np.nan
    for j in range(ens):  # range, not the Python-2-only xrange
        ind_ens = np.arange(j, x.shape[0] - (ens - j), ens)
        x_ens[..., j] = x[ind_ens]
    x_ens = np.nanmean(x_ens, axis=1)
    return x_ens
def rotate_coords(x, y, theta):
    """Rotate coordinates (or speeds) into a frame whose x-axis is turned by ``theta``.

    Similar to "rotate_to_channelcoords.m": ``theta`` (radians) is the angle
    between the old axis and the new x-axis, counter-clockwise positive.
    """
    cos_t = np.cos(theta)
    sin_t = np.sin(theta)
    xnew = x * cos_t + y * sin_t
    ynew = y * cos_t - x * sin_t
    return xnew, ynew
def rotate_to_true(X, Y, theta=-19):
    """Rotate magnetic-north-referenced components to true north.

    Args:
        X, Y: Components (e.g. speeds) relative to magnetic north; may be
            scalars or vectors.
        theta: Declination angle in degrees.  The default of -19 degrees is
            the value assumed for Nova Scotia and may not be accurate for all
            locations.

    Returns:
        (x, y): The components relative to true north.
    """
    # Parenthesized so this also runs under Python 3 (was a Python-2-only
    # print statement); output is unchanged under Python 2.
    print('Rotating velocities to be relative to true north (declination = {0})'.format(theta))
    Theta = theta * np.pi / 180
    x = X * np.cos(Theta) + Y * np.sin(Theta)
    y = -X * np.sin(Theta) + Y * np.cos(Theta)
    return x, y
def get_DirFromN(u, v):
    """Direction from north in degrees, measured clockwise.

    Args:
        u: eastward component (array).
        v: northward component (array).

    Returns:
        Array of headings in [0, 360) degrees.
    """
    heading = np.arctan2(u, v) * 180 / np.pi
    # arctan2 yields (-180, 180]; wrap negatives into [0, 360).
    negative = np.where(heading < 0)
    heading[negative] = heading[negative] + 360
    return heading
def sign_speed(u_all, v_all, s_all, dir_all, flood_heading):
    """Sign speeds positive on flood and negative on ebb, column by column.

    Args:
        u_all, v_all: East/north velocity components, shape (time, bins).
        s_all: Unsigned speeds, same shape.
        dir_all: Directions in degrees from north, same shape.
        flood_heading: Either a scalar compass heading of the flood direction
            (expanded to heading +/- 90 degrees) or an explicit (lo, hi) pair.

    Returns:
        (s_signed_all, PA_all): signed speeds and the per-bin principal axis.
    """
    if isinstance(flood_heading, int):  # was type(...) == int; same intent, idiomatic
        # Expand a single heading into the +/-90 degree sector around it.
        flood_heading += np.array([-90, 90])
    s_signed_all = np.empty(s_all.shape)
    s_signed_all.fill(np.nan)
    PA_all = np.zeros(s_all.shape[-1])
    for i in range(s_all.shape[-1]):  # range, not the Python-2-only xrange
        u = u_all[:, i]
        v = v_all[:, i]
        dir = dir_all[:, i]
        s = s_all[:, i]
        # Determine principal axes - potentially a problem if axes are very
        # kinked, since this would misclassify part of ebb and flood.
        PA, _ = principal_axis(u, v)
        PA_all[i] = PA
        # Sign speed relative to the principal axis, eliminating wrap-around.
        dir_PA = dir - PA
        dir_PA[dir_PA < -90] += 360
        dir_PA[dir_PA > 270] -= 360
        # General direction of flood passed as input argument.
        if flood_heading[0] <= PA <= flood_heading[1]:
            ind_fld = np.where((dir_PA >= -90) & (dir_PA < 90))
            s_signed = -s
            s_signed[ind_fld] = s[ind_fld]
        else:
            ind_ebb = np.where((dir_PA >= -90) & (dir_PA < 90))
            # Copy so signing does not write through the view into the
            # caller's s_all (the original aliased s here and mutated it).
            s_signed = s.copy()
            s_signed[ind_ebb] = -s[ind_ebb]
        s_signed_all[:, i] = s_signed
    return s_signed_all, PA_all
def principal_axis(u, v):
    """Principal axis of the (u, v) scatter via the covariance eigendecomposition.

    Args:
        u, v: 1-D velocity component arrays (may contain NaNs).

    Returns:
        PA: principal-axis direction in compass coordinates (degrees).
        varxp_PA: variance captured by the principal axis (via np.diag of the
            first eigenvalue row, as in the original).
    """
    # Create the (n, 2) velocity matrix.
    U = np.vstack((u,v)).T
    # Drop rows whose u component is NaN.
    # NOTE(review): rows with NaN only in v are kept -- confirm both inputs
    # share NaN locations.
    U = U[~np.isnan(U[:, 0]), :]
    # Convert the matrix to deviate (mean-removed) form.
    rep = np.tile(np.mean(U, axis=0), [len(U), 1])
    U -= rep
    # Compute the 2x2 covariance matrix.
    R = np.dot(U.T, U) / (len(U) - 1)
    # Eigenvalues and eigenvectors of the covariance matrix.
    lamb, V = np.linalg.eig(R)
    # Sort eigenvalues in descending order so that the major axis is given by
    # the first eigenvector: sort in descending order with indices.
    ilamb = sorted(range(len(lamb)), key=lambda k: lamb[k], reverse=True)
    lamb = sorted(lamb, reverse=True)
    # Reconstruct the (diagonal) eigenvalue matrix.
    lamb = np.diag(lamb)
    # Reorder the eigenvectors to match the sorted eigenvalues.
    V = V[:, ilamb]
    # Rotation angle of the axis in radians relative to cartesian coordinates.
    # NOTE(review): this reads column 1 (the second/minor eigenvector) and the
    # original WES_COMMENT already questioned the sign ("may need to change
    # this, cause in original is -ra") -- confirm against the MATLAB source.
    ra = np.arctan2(V[0,1], V[1,1])
    # Express the principal axis in compass coordinates.
    PA = ra * 180 / np.pi + 90
    # Variance captured by the principal axis.
    varxp_PA = np.diag(lamb[0]) / np.trace(lamb)
    return PA, varxp_PA
class Struct:
    """Lightweight attribute bag: keyword arguments become instance attributes."""

    def __init__(self, **entries):
        for name, value in entries.items():
            setattr(self, name, value)
def save_FlowFile_BPFormat(fileinfo, adcp, rbr, params, options, debug=False):
    """Convert raw ADCP (+ optional RBR pressure) data into Polagye-Tools format.

    Trims the record to params['tmin']..['tmax'] (year-days), selects depth
    bins within params['zmin']..['zmax'], rotates velocities to true north,
    computes direction and principal-axis-signed speed, and returns the
    assembled dictionary.  Python 2 module (print statements).

    Args:
        fileinfo: dict with 'paramfile', 'outdir', 'flowfile'.
        adcp: raw ADCP record (dict-like; 'mtime', 'config', velocity fields).
        rbr: RBR pressure-sensor record (attribute access) or falsy if absent.
        params: processing parameters (trim limits, offsets, declination, ...).
        options: plotting switches 'showRBRavg', 'showPA'.
        debug: also load MATLAB reference values and plot comparisons.

    Returns:
        saveDict with keys data, pres, time, lon, lat, params, comments.
    """
    comments = ['data is in Polagye Tools format',
                'data.east_vel and data.north_vel are relative to true north',
                'The parameters were set by ' + fileinfo['paramfile']]
    day1 = date2py(adcp['mtime'][0][0])
    print day1
    #date_time = [date2py(tval[0]) for tval in adcp.mtime[:]]
    # Year-day reference: ordinal of Jan 1 of the following year.
    datenum = datetime(day1.year,1,1) + timedelta(365)
    datenum = datenum.toordinal()
    yd = adcp['mtime'][:].flatten() - datenum
    # Indices of samples inside the requested time window (year-days).
    tind = np.where((yd > params['tmin']) & (yd < params['tmax']))[0]
    pres = {}
    time = {}
    time['mtime'] = adcp['mtime'][:].flatten()[tind]
    # Mean sampling interval of the trimmed record (MATLAB datenum days).
    dt = np.nanmean(np.diff(time['mtime']))
    if not rbr:
        print 'Depths measured by ADCP not yet coded.'
        comments.append('Depths as measured by ADCP')
    else:
        print 'Ensemble averaging rbr data'
        comments.append('Depths as measured by RBR sensor')
        # Number of RBR samples per ADCP ensemble interval.
        nens = round(dt/(rbr.mtime[1] - rbr.mtime[0]))
        temp = np.arange(rbr.mtime[nens/2-1], rbr.mtime[-1-nens/2], dt)
        #temp2 = np.r_[rbr.mtime[nens/2-1]: rbr.mtime[-1-nens/2]: dt]
        mtimeens = np.arange(rbr.mtime[nens/2-1], rbr.mtime[-1-nens/2], dt)
        # NOTE(review): rbr_hr_offset is applied in hours/24 -- confirm the
        # RBR clock offset convention.
        mtimeens = mtimeens + params['rbr_hr_offset'] / 24
        depthens = calc_ensemble(rbr.depth, nens, 1)
        # Interpolate the ensemble-averaged depths onto the ADCP time base.
        temp = sip.interp1d(mtimeens, depthens, kind='linear')
        pres['surf']= temp(time['mtime']) + params['dabPS']
    if debug:
        # Load in matlab values for testing
        filename = './140703-EcoEII_database/scripts_examples/mtime.mat'
        mat = sio.loadmat(filename, struct_as_record=False, squeeze_me=True)
        matTimes = mat['mtimeens']
        filename = './140703-EcoEII_database/scripts_examples/dt.mat'
        mat = sio.loadmat(filename, struct_as_record=False, squeeze_me=True)
        matdt = mat['dt']
        filename = './140703-EcoEII_database/scripts_examples/depthens.mat'
        mat = sio.loadmat(filename, struct_as_record=False, squeeze_me=True)
        matdepthens = mat['depthens']
        filename = './140703-EcoEII_database/scripts_examples/time.mat'
        mat = sio.loadmat(filename, struct_as_record=False, squeeze_me=True)
        matmtime = mat['mtime']
        # Print python-vs-MATLAB differences (should be ~zero).
        print matTimes.shape
        print temp - matTimes
        print temp2 - matTimes
        print dt - matdt
        print depthens - matdepthens
        print 'time'
        print time['mtime'] - matmtime

    ## zlevels
    data = {}
    # Bin heights above bottom, in the same units as zmin/zmax.
    z = adcp['config']['ranges'][:] + params['dabADCP']
    z = z.flatten()
    zind = np.where((z > params['zmin']) & (z < params['zmax']))[0]
    data['bins'] = z[zind]

    ## Currents
    data['vert_vel'] = adcp['vert_vel'][:][tind][:, zind]
    data['error_vel'] = adcp['error_vel'][:][tind][:, zind]
    # If compass wasn't calibrated
    if 'hdgmod' in params:
        adcp['east_vel'][:], adcp['north_vel'][:] = rotate_coords(adcp['east_vel'][:],
                                                                  adcp['north_vel'][:],
                                                                  params['hdgmod'])
        comments.append('East and north velocity rotated by params.hdgmod')
    # Rotate east_vel and north_vel to be relative to true north
    data['east_vel'], data['north_vel'] = \
        rotate_to_true(adcp['east_vel'][:][tind][:, zind],
                       adcp['north_vel'][:][tind][:, zind],
                       params['declination'])
    # Direction
    data['dir_vel'] = get_DirFromN(data['east_vel'],data['north_vel'])
    # Signed Speed
    spd_all = np.sqrt(data['east_vel']**2+data['north_vel']**2)
    # Determine flood and ebb based on principal direction (Polagye Routine)
    print 'Getting signed speed (Principal Direction Method) -- used all speeds'
    s_signed_all, PA_all = sign_speed(data['east_vel'], data['north_vel'],
                                      spd_all, data['dir_vel'], params['flooddir'])
    data['mag_signed_vel'] = s_signed_all

    if options['showRBRavg'] or debug:
        # Sanity plot: raw RBR elevation vs the ensemble-averaged surface.
        print 'Plotting RBR vs average'
        plt.plot(rbr.mtime + params['rbr_hr_offset'] / 24, rbr.depth+params['dabPS'],
                 label='RBR')
        plt.plot(time['mtime'], pres['surf'], 'r', label='AVG')
        plt.xlabel('Time')
        plt.ylabel('Elevation')
        plt.legend(bbox_to_anchor=(0, 0, 1, 1), bbox_transform=plt.gcf().transFigure)
        plt.show()
    if options['showPA'] or debug:
        # Sanity plot: per-bin principal axis vs the mean surface elevation.
        print 'Plotting PA vs mean'
        plt.plot(PA_all, data['bins'], label='PA')
        plt.plot(np.array([PA_all[0], PA_all[-1]]),
                 np.array([np.mean(pres['surf']), np.mean(pres['surf'])]),
                 label='mean')
        plt.xlabel('Principal Axis Direction\n(clockwise from north)')
        plt.ylabel('z (m)')
        plt.legend(bbox_to_anchor=(0, 0, 1, 1), bbox_transform=plt.gcf().transFigure)
        plt.show()

    ## save
    lon = params['lon']
    lat = params['lat']
    outfile = fileinfo['outdir'] + fileinfo['flowfile']
    print 'Saving data to {0}'.format(outfile)
    saveDict = {'data':data, 'pres':pres, 'time':time, 'lon':lon, 'lat':lat,
                'params':params, 'comments':comments}
    #save(outfile,'data','pres','time','lon','lat','params','Comments')

    ## Save metadata
    #metadata.progname=[mfilename('fullpath')];
    #metadata.date = datestr(now);
    #metadata.paramfile = fileinfo.paramfile;
    #save(outfile,'metadata','-append')
    return saveDict
if __name__ == '__main__':
    # Example/driver: load one raw deployment and convert it to BP format.
    filename = '140703-EcoEII_database/data/GP-120726-BPd_raw.mat'
    data = rawADCP(filename)
    rawdata = rawADCP(filename)
    #adcp = Struct(**data.adcp)
    #rawADCP = data.adcp
    adcp = data.adcp
    #params = Struct(**data.saveparams)
    params = data.saveparams
    # The RBR record is accessed by attribute, so wrap the dict in Struct.
    rbr = Struct(**data.rbr)

    # save_FlowFile_BPFormat(data.fileinfo, data.adcp, data.rbr,
    #                        data.saveparams, data.options)
    saveDict = \
        save_FlowFile_BPFormat(data.fileinfo, adcp, rbr,
                               params, data.options)
| 32.902299 | 94 | 0.599301 | 86 | 0.007511 | 0 | 0 | 0 | 0 | 0 | 0 | 4,081 | 0.356419 |
2df53d0c6968d61a6511d518ea06ce30084abc51 | 1,923 | py | Python | Module1/Day06/module1_day06_lists.py | datsaloglou/100DaysPython | b6a90bfa5260fb2cb66da9eceaafda9a53c76c0f | [
"MIT"
] | 1 | 2019-06-02T12:16:52.000Z | 2019-06-02T12:16:52.000Z | Module1/Day06/module1_day06_lists.py | datsaloglou/100DaysPython | b6a90bfa5260fb2cb66da9eceaafda9a53c76c0f | [
"MIT"
] | null | null | null | Module1/Day06/module1_day06_lists.py | datsaloglou/100DaysPython | b6a90bfa5260fb2cb66da9eceaafda9a53c76c0f | [
"MIT"
] | null | null | null | """
Author: CaptCorpMURICA
Project: 100DaysPython
File: module1_day06_lists.py
Creation Date: 6/2/2019, 8:55 AM
Description: Learn the basic of lists in python.
"""
# Two equivalent ways to create an empty list.
list_1 = []
list_2 = list()
print("List 1 Type: {}\nList 2 Type: {}".format(type(list_1), type(list_2)))

# list() splits a string into a list of its characters.
text = "Luggage Combination"
print(list(text))

# In-place sorting with list.sort().
luggage = [1, 3, 5, 2, 4]
luggage.sort()
print(luggage)

# Assignment aliases the same list: sorting one sorts "both".
numbers = [1, 2, 3, 4, 5]
numbers_sorted = numbers
numbers_sorted.sort(reverse=True)
print("numbers: {}\nnumbers_sorted: {}".format(numbers, numbers_sorted))

# list(...) makes a copy, so the original is untouched.
numbers = [1, 2, 3, 4, 5]
numbers_sorted = list(numbers)
numbers_sorted.sort(reverse=True)
print("numbers: {}\nnumbers_sorted: {}".format(numbers, numbers_sorted))

# Concatenation with + builds a new list.
odd = [1, 3, 5]
even = [2, 4]
luggage = odd + even
print(luggage)

# extend() appends the elements of another list in place.
luggage = [1, 3, 5]
even = [2, 4]
luggage.extend(even)
print(luggage)

# sorted() returns a new list; .sort() mutates in place.
odd = [1, 3, 5]
even = [2, 4]
luggage = odd + even
print("Unsorted list: {}".format(luggage))
print("Using the sorted() function: {}".format(sorted(luggage)))
luggage.sort()
print("Using the .sort() method: {}".format(luggage))

# append() adds one element at a time.
lines = []
lines.append("They told me to comb the desert, so I'm combing the desert")
lines.append("YOGURT! I hate Yogurt! Even with strawberries!")
lines.append("We'll meet again in Spaceballs 2 : The Quest for More Money.")
print(lines)

# index() returns the position of the first match.
luggage = [1, 2, 3, 4, 5]
print(luggage.index(2))

# count() tallies occurrences of a value.
quote = list("YOGURT! I hate Yogurt! Even with strawberries!")
print(quote.count("r"))

# insert() places an element before the given index.
luggage = [1, 2, 4, 5]
luggage.insert(2, 3)
print(luggage)

# pop() removes (and returns) the last element, or the one at an index.
luggage = [1, 2, 3, 3, 4, 5, 6]
luggage.pop()
print(luggage)
luggage.pop(2)
print(luggage)

# remove() deletes the first matching value.
rng = list(range(0,10))
rng.remove(7)
print(rng)

# reverse() flips the list in place.
countdown = [5, 4, 3, 2, 1]
countdown.reverse()
print(countdown)

# List comprehension: apply an expression to every element.
sample = list(range(1,13))
times_12 = [i * 12 for i in sample]
print(times_12)

# clear() empties the list in place.
luggage.clear()
print(luggage)

# Item assignment replaces a single element by index.
luggage = [2, 2, 3, 4, 5]
luggage[0] = 1
print(luggage)
| 27.084507 | 76 | 0.667707 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 637 | 0.331253 |
2df53ea42aed1c3338e8b5cf86c170ffdfe06678 | 131 | py | Python | magicdice/__init__.py | emre/magicdice | 326512696b766248cfb8998aec5b0bd8bff393ba | [
"MIT"
] | 5 | 2019-02-26T21:59:21.000Z | 2019-05-28T06:32:06.000Z | magicdice/__init__.py | emre/magicdice | 326512696b766248cfb8998aec5b0bd8bff393ba | [
"MIT"
] | null | null | null | magicdice/__init__.py | emre/magicdice | 326512696b766248cfb8998aec5b0bd8bff393ba | [
"MIT"
] | null | null | null | class MagicDice:
def __init__(self, account, active_key):
self.account = account
self.active_key = active_key
| 21.833333 | 44 | 0.671756 | 130 | 0.992366 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
2df5a62d162481addcad6f25157f37024121dd7c | 708 | py | Python | Interview Preparation Kit/01 - Warm-up Challenges/04 - Repeated String.py | srgeyK87/Hacker-Rank-30-days-challlenge | 4a929935148db493057dd3a5a88e77efcc0fd086 | [
"MIT"
] | 275 | 2020-04-10T18:01:13.000Z | 2022-03-23T15:03:41.000Z | Interview Preparation Kit/01 - Warm-up Challenges/04 - Repeated String.py | rakshit6432/HackerRank-Solutions | 9fd5b295f03cc80b77e80810bb6b7b6acd160e79 | [
"MIT"
] | 2 | 2020-12-20T10:55:40.000Z | 2021-05-20T12:52:53.000Z | Interview Preparation Kit/01 - Warm-up Challenges/04 - Repeated String.py | rakshit6432/HackerRank-Solutions | 9fd5b295f03cc80b77e80810bb6b7b6acd160e79 | [
"MIT"
] | 196 | 2020-09-26T16:24:43.000Z | 2022-03-28T10:40:05.000Z | # ========================
# Information
# ========================
# Direct Link: https://www.hackerrank.com/challenges/repeated-string/problem
# Difficulty: Easy
# Max Score: 20
# Language: Python
# ========================
# Solution
# ========================
import os
# Complete the repeatedString function below.
def repeatedString(s, n):
count_1 = n//len(s) * s.count('a')
remained_string = n%len(s)
count_2 = s[:remained_string].count('a')
return count_1 + count_2
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
s = input()
n = int(input())
result = repeatedString(s, n)
fptr.write(str(result) + '\n')
fptr.close()
| 22.83871 | 76 | 0.542373 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 349 | 0.492938 |
2df6288c891a7e36cd3f5221f22fe8e36ca68a70 | 6,056 | py | Python | regress/PORT_ME_TESTS/tests-glen.py | fp7-ofelia/VeRTIGO | 11f39f819196c8352611852435dea17bc6a2292f | [
"BSD-3-Clause"
] | 2 | 2016-10-12T08:20:00.000Z | 2017-05-09T13:13:18.000Z | regress/PORT_ME_TESTS/tests-glen.py | fp7-ofelia/VeRTIGO | 11f39f819196c8352611852435dea17bc6a2292f | [
"BSD-3-Clause"
] | null | null | null | regress/PORT_ME_TESTS/tests-glen.py | fp7-ofelia/VeRTIGO | 11f39f819196c8352611852435dea17bc6a2292f | [
"BSD-3-Clause"
] | 1 | 2020-10-01T07:57:34.000Z | 2020-10-01T07:57:34.000Z | #!/usr/bin/python
from fvregress import *
import string # really? you have to do this?
if len(sys.argv) > 1 :
wantPause = True
timeout=9999999
valgrindArgs= []
else:
wantPause = False
timeout=5
valgrindArgs= None
# start up a flowvisor with 1 switch (default) and two guests
#h= HyperTest(guests=[('localhost',54321)],
# hyperargs=["-v0", "-a", "flowvisor-conf.d-glen", "ptcp:%d"% HyperTest.OFPORT],valgrind=valgrindArgs)
h = FvRegress.parseConfig(configDir='flowvisor-conf.d-glen', valgrind=valgrindArgs)
if wantPause:
doPause("start tests")
#################################### Start Tests
try:
feature_request = FvRegress.OFVERSION + '05 0008 2d47 c5eb'
feature_request_after = FvRegress.OFVERSION + '05 0008 0001 0000'
h.runTest(name="feature_request",timeout=timeout, events= [
TestEvent( "send","guest","openpipes", feature_request),
TestEvent( "recv","switch","switch1", feature_request_after),
])
##############################################################
#flow_mod_del_all = FvRegress.OFVERSION + '''0e0048efbefeca000fffff00000000000000000000000000000000000000000000000000000000000000000003000000000000ffffffffffff000000000000'''
flow_mod_del_all = FvRegress.OFVERSION + '''0e0048084a06b6ffffffff68e1b2080100000005f04e4f3cc1f9237cb79b6494c75a277565383900ecb2b70000000000000000000000000003000000000000ffff000000000000'''
flow_mod_del_all_1 = FvRegress.OFVERSION + '''0e004802010000003820f768e100000000000000ff000000000000000000000000000000000000000000000000000000000000000000000003000000000000ffff000000000000'''
flow_mod_del_all_2 = FvRegress.OFVERSION + '''0e004802010000003820f768e100000000000000ff000000010000000000000000000000000000000000000000000000000000000000000003000000000000ffff000000000000'''
flow_mod_del_all_3 = FvRegress.OFVERSION + '''0e004802010000003820f768e100000000000000ff000000020000000000000000000000000000000000000000000000000000000000000003000000000000ffff000000000000'''
flow_mod_del_all_4 = FvRegress.OFVERSION + '''0e004802010000003820f768e100000000000000ff000000030000000000000000000000000000000000000000000000000000000000000003000000000000ffff000000000000'''
flow_mod_del_all_5 = FvRegress.OFVERSION + '''0e004802010000003820f768e100000000000000ff000000040000000000000000000000000000000000000000000000000000000000000003000000000000ffff000000000000'''
flow_mod_del_all_6 = FvRegress.OFVERSION + '''0e004802010000003820f768e100000000000000ff000000050000000000000000000000000000000000000000000000000000000000000003000000000000ffff000000000000'''
flow_mod_del_all_7 = FvRegress.OFVERSION + '''0e004802010000003820f768e100000000000000000c0000110000000000000000000000000000000000000000000000000000000000000003000000000000ffff000000000000'''
flow_mod_del_all_8 = FvRegress.OFVERSION + '''0e004802010000003820f768e100000000000000188b2700010000000000000000000000000000000000000000000000000000000000000003000000000000ffff000000000000'''
h.runTest(name="flow mod del all ", timeout=timeout, events= [
TestEvent( "send","guest","openpipes", flow_mod_del_all),
TestEvent( "recv","switch","switch1", flow_mod_del_all_1),
TestEvent( "recv","switch","switch1", flow_mod_del_all_2),
TestEvent( "recv","switch","switch1", flow_mod_del_all_3),
TestEvent( "recv","switch","switch1", flow_mod_del_all_4),
TestEvent( "recv","switch","switch1", flow_mod_del_all_5),
TestEvent( "recv","switch","switch1", flow_mod_del_all_6),
TestEvent( "recv","switch","switch1", flow_mod_del_all_7),
TestEvent( "recv","switch","switch1", flow_mod_del_all_8),
])
##############################################################
flow_mod = FvRegress.OFVERSION + '''0e0048000000000ffffffe00060000000000000000000000000000000000000000000000000000000000000000000000008000ffffffffffff06e8000000000000000800010000'''
flow_mod_exp1 = FvRegress.OFVERSION + '''0e004803010000003820f6000600000000000000ff000000000000000000000000000000000000000000000000000000008000ffffffffffff06e8000000000000000800010000'''
flow_mod_exp2 = FvRegress.OFVERSION + '''0e004803010000003820f6000600000000000000ff000000010000000000000000000000000000000000000000000000008000ffffffffffff06e8000000000000000800010000'''
flow_mod_exp3 = FvRegress.OFVERSION + '''0e004803010000003820f6000600000000000000ff000000020000000000000000000000000000000000000000000000008000ffffffffffff06e8000000000000000800010000'''
flow_mod_exp4 = FvRegress.OFVERSION + '''0e004803010000003820f6000600000000000000ff000000030000000000000000000000000000000000000000000000008000ffffffffffff06e8000000000000000800010000'''
flow_mod_exp5 = FvRegress.OFVERSION + '''0e004803010000003820f6000600000000000000ff000000040000000000000000000000000000000000000000000000008000ffffffffffff06e8000000000000000800010000'''
flow_mod_exp6 = FvRegress.OFVERSION + '''0e004803010000003820f6000600000000000000ff000000050000000000000000000000000000000000000000000000008000ffffffffffff06e8000000000000000800010000'''
flow_mod_exp7 = FvRegress.OFVERSION + '''0e004803010000003820f6000600000000000000000c0000110000000000000000000000000000000000000000000000008000ffffffffffff06e8000000000000000800010000'''
flow_mod_exp8 = FvRegress.OFVERSION + '''0e004803010000003820f6000600000000000000188b2700010000000000000000000000000000000000000000000000008000ffffffffffff06e8000000000000000800010000'''
h.runTest(name="glen test 1", timeout=timeout, events= [
TestEvent( "send","guest","openpipes", flow_mod),
TestEvent( "recv","switch","switch1", flow_mod_exp1),
TestEvent( "recv","switch","switch1", flow_mod_exp2),
TestEvent( "recv","switch","switch1", flow_mod_exp3),
TestEvent( "recv","switch","switch1", flow_mod_exp4),
TestEvent( "recv","switch","switch1", flow_mod_exp5),
TestEvent( "recv","switch","switch1", flow_mod_exp6),
TestEvent( "recv","switch","switch1", flow_mod_exp7),
TestEvent( "recv","switch","switch1", flow_mod_exp8),
])
#########################################
# more tests for this setup HERE
#################################### End Tests
finally:
if wantPause:
doPause("start cleanup")
h.cleanup()
| 67.288889 | 192 | 0.803831 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,984 | 0.65786 |
2df6451745f6199069fa0651e5f50fd52805e8f8 | 1,325 | py | Python | pseudo/__init__.py | pniedzwiedzinski/pseudo | b27570bd8400b6a51a2958454b31f1ce2e25c4f9 | [
"MIT"
] | 5 | 2019-04-02T07:01:34.000Z | 2019-11-24T02:08:03.000Z | pseudo/__init__.py | pniedzwiedzinski/pseudo | b27570bd8400b6a51a2958454b31f1ce2e25c4f9 | [
"MIT"
] | 11 | 2019-03-20T08:29:30.000Z | 2019-05-21T11:57:03.000Z | pseudo/__init__.py | pniedzwiedzinski/pseudo | b27570bd8400b6a51a2958454b31f1ce2e25c4f9 | [
"MIT"
] | 1 | 2019-04-02T15:24:40.000Z | 2019-04-02T15:24:40.000Z | """
Writing actual code might be hard to understand for new-learners. Pseudocode is a tool
for writing algorithms without knowing how to code. This module contains classes and
methods for parsing pseudocode to AST and then evaluating it.
Example:
If you installed this module with pip you can run pseudocode from file, i.e. to run
`test.pdc` file type::
$ pdc test.pdc
If you want to parse it by your own you will need `pseudo.lexer.Lexer` instance.::
from pseudo.lexer import Lexer
lex = Lexer("x := 12")
expression = lex.read_next()
print(expression)
If lexer reach the end of input, the `pseudo.stream.EndOfFile` exception will be raised.
"""
__author__ = "Patryk Niedźwiedziński"
__version__ = "0.11.0"
import gc
from pseudo.lexer import Lexer
from pseudo.stream import EndOfFile
from pseudo.utils import append
def compile(text_input: str, range_symbol: str = "...") -> list:
"""Compile from string to list of operations."""
lexer = Lexer(text_input)
lexer.range_symbol = range_symbol
x = None
instructions = []
while True:
try:
x = lexer.read_next(prev=x)
except EndOfFile:
break
instructions = append(instructions, x)
del lexer
gc.collect()
return instructions
| 24.537037 | 92 | 0.675472 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 795 | 0.599096 |
2df6c6319b7d1906f84dc0b600c9446955b1aa9f | 320 | py | Python | adabru_talon/code/deep_sleep.py | adabru/speech | 74a4398693186f5b90ac5b3252aa7bd2764fa695 | [
"WTFPL"
] | null | null | null | adabru_talon/code/deep_sleep.py | adabru/speech | 74a4398693186f5b90ac5b3252aa7bd2764fa695 | [
"WTFPL"
] | null | null | null | adabru_talon/code/deep_sleep.py | adabru/speech | 74a4398693186f5b90ac5b3252aa7bd2764fa695 | [
"WTFPL"
] | null | null | null | from talon import (
Module,
Context,
)
mod = Module()
mod.tag("deep_sleep", desc="Enable deep sleep")
ctx = Context()
@mod.action_class
class Actions:
def enable_deep_sleep():
"""???"""
ctx.tags = ["user.deep_sleep"]
def disable_deep_sleep():
"""???"""
ctx.tags = []
| 15.238095 | 47 | 0.55625 | 171 | 0.534375 | 0 | 0 | 189 | 0.590625 | 0 | 0 | 66 | 0.20625 |
2df782b87d6da8a3b61ad938472a58ca41cb66f2 | 396 | py | Python | stubs/esp32_1_10_0/upip_utarfile.py | jmannau/micropython-stubber | 8930e8a0038192fd259b31a193d1da3b2501256a | [
"MIT"
] | null | null | null | stubs/esp32_1_10_0/upip_utarfile.py | jmannau/micropython-stubber | 8930e8a0038192fd259b31a193d1da3b2501256a | [
"MIT"
] | null | null | null | stubs/esp32_1_10_0/upip_utarfile.py | jmannau/micropython-stubber | 8930e8a0038192fd259b31a193d1da3b2501256a | [
"MIT"
] | null | null | null | "Module 'upip_utarfile' on firmware 'v1.10-247-g0fb15fc3f on 2019-03-29'"
DIRTYPE = 'dir'
class FileSection(): ...
def read():
pass
def readinto():
pass
def skip():
pass
REGTYPE = 'file'
TAR_HEADER = None
class TarFile(): ...
def extractfile():
pass
def next():
pass
class TarInfo(): ...
def roundup():
pass
uctypes = None
| 13.2 | 73 | 0.560606 | 64 | 0.161616 | 0 | 0 | 0 | 0 | 0 | 0 | 84 | 0.212121 |
2df89db144b707f4b0822c6ae9ce1acc6ef701ad | 482 | py | Python | core/src/zeit/content/author/browser/interfaces.py | rickdg/vivi | 16134ac954bf8425646d4ad47bdd1f372e089355 | [
"BSD-3-Clause"
] | 5 | 2019-05-16T09:51:29.000Z | 2021-05-31T09:30:03.000Z | core/src/zeit/content/author/browser/interfaces.py | rickdg/vivi | 16134ac954bf8425646d4ad47bdd1f372e089355 | [
"BSD-3-Clause"
] | 107 | 2019-05-24T12:19:02.000Z | 2022-03-23T15:05:56.000Z | core/src/zeit/content/author/browser/interfaces.py | rickdg/vivi | 16134ac954bf8425646d4ad47bdd1f372e089355 | [
"BSD-3-Clause"
] | 3 | 2020-08-14T11:01:17.000Z | 2022-01-08T17:32:19.000Z | from zeit.cms.i18n import MessageFactory as _
import zope.formlib.interfaces
import zope.interface
@zope.interface.implementer(zope.formlib.interfaces.IWidgetInputError)
class DuplicateAuthorWarning(Exception):
def doc(self):
return _(
u'An author with the given name already exists. '
u'If you\'d like to create another author with the same '
u'name anyway, check "Add duplicate author" '
u'and save the form again.')
| 32.133333 | 70 | 0.692946 | 309 | 0.641079 | 0 | 0 | 380 | 0.788382 | 0 | 0 | 178 | 0.369295 |
2df8c42e864589ea93f8316ea84d6115885699fb | 237 | py | Python | Skyhero-admin/Day 29/exp8_3.py | adityajoshi-08/100-Days-of-Code | 22026ca37dd8bcf5e5e22a8a302510249936c002 | [
"MIT"
] | 33 | 2022-01-11T14:00:24.000Z | 2022-03-24T02:49:44.000Z | Skyhero-admin/Day 29/exp8_3.py | adityajoshi-08/100-Days-of-Code | 22026ca37dd8bcf5e5e22a8a302510249936c002 | [
"MIT"
] | 28 | 2022-01-11T17:08:52.000Z | 2022-03-15T17:03:15.000Z | Skyhero-admin/Day 29/exp8_3.py | adityajoshi-08/100-Days-of-Code | 22026ca37dd8bcf5e5e22a8a302510249936c002 | [
"MIT"
] | 57 | 2022-01-11T15:54:07.000Z | 2022-03-27T04:37:42.000Z | def countWord(word):
count = 0
with open('test.txt') as file:
for line in file:
if word in line:
count += line.count(word)
return count
word = input('Enter word: ')
count = countWord(word)
print(word, '- occurence: ', count) | 21.545455 | 35 | 0.658228 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 39 | 0.164557 |
2df9a0c0fb58029492e4ab2e5a697ee378f5037d | 9,190 | py | Python | utils.py | khangt1k25/Clustering-Segmentation | 4a7262c9d12fac4f595c8d0740342017b498262b | [
"MIT"
] | null | null | null | utils.py | khangt1k25/Clustering-Segmentation | 4a7262c9d12fac4f595c8d0740342017b498262b | [
"MIT"
] | null | null | null | utils.py | khangt1k25/Clustering-Segmentation | 4a7262c9d12fac4f595c8d0740342017b498262b | [
"MIT"
] | null | null | null | import random
import os
import logging
import pickle
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.backends.cudnn as cudnn
# import faiss
################################################################################
# General-purpose #
################################################################################
def str_list(l):
return '_'.join([str(x) for x in l])
def set_logger(log_path):
logger = logging.getLogger()
logger.setLevel(logging.INFO)
# Logging to a file
file_handler = logging.FileHandler(log_path)
file_handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s: %(message)s'))
logger.addHandler(file_handler)
# Logging to console
stream_handler = logging.StreamHandler()
stream_handler.setFormatter(logging.Formatter('%(message)s'))
logger.addHandler(stream_handler)
return logger
class Logger(object):
""" Class to update every epoch to keep trace of the results
Methods:
- log() log and save
"""
def __init__(self, path):
self.path = path
self.data = []
def log(self, train_point):
self.data.append(train_point)
with open(os.path.join(self.path), 'wb') as fp:
pickle.dump(self.data, fp, -1)
class AverageMeter(object):
"""Computes and stores the average and current value"""
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
def get_datetime(time_delta):
days_delta = time_delta // (24*3600)
time_delta = time_delta % (24*3600)
hour_delta = time_delta // 3600
time_delta = time_delta % 3600
mins_delta = time_delta // 60
time_delta = time_delta % 60
secs_delta = time_delta
return '{}:{}:{}:{}'.format(days_delta, hour_delta, mins_delta, secs_delta)
################################################################################
# Metric-related ops #
################################################################################
def _fast_hist(label_true, label_pred, n_class):
mask = (label_true >= 0) & (label_true < n_class) # Exclude unlabelled data.
hist = np.bincount(n_class * label_true[mask] + label_pred[mask],\
minlength=n_class ** 2).reshape(n_class, n_class)
return hist
def scores(label_trues, label_preds, n_class):
hist = np.zeros((n_class, n_class))
for lt, lp in zip(label_trues, label_preds):
hist += _fast_hist(lt.flatten(), lp.flatten(), n_class)
return hist
def get_result_metrics(histogram):
tp = np.diag(histogram)
fp = np.sum(histogram, 0) - tp
fn = np.sum(histogram, 1) - tp
iou = tp / (tp + fp + fn)
prc = tp / (tp + fn)
opc = np.sum(tp) / np.sum(histogram)
result = {"iou": iou,
"mean_iou": np.nanmean(iou),
"precision_per_class (per class accuracy)": prc,
"mean_precision (class-avg accuracy)": np.nanmean(prc),
"overall_precision (pixel accuracy)": opc}
result = {k: 100*v for k, v in result.items()}
return result
def compute_negative_euclidean(featmap, centroids, metric_function):
centroids = centroids.unsqueeze(-1).unsqueeze(-1)
return - (1 - 2*metric_function(featmap)\
+ (centroids*centroids).sum(dim=1).unsqueeze(0)) # negative l2 squared
def get_metric_as_conv(centroids):
N, C = centroids.size()
centroids_weight = centroids.unsqueeze(-1).unsqueeze(-1)
metric_function = nn.Conv2d(C, N, 1, padding=0, stride=1, bias=False)
metric_function.weight.data = centroids_weight
metric_function = nn.DataParallel(metric_function)
metric_function = metric_function.cuda()
return metric_function
################################################################################
# General torch ops #
################################################################################
def freeze_all(model):
for param in model.module.parameters():
param.requires_grad = False
def initialize_classifier(args):
classifier = get_linear(args.in_dim, args.K_train)
classifier = nn.DataParallel(classifier)
classifier = classifier.cuda()
return classifier
def get_linear(indim, outdim):
classifier = nn.Conv2d(indim, outdim, kernel_size=1, stride=1, padding=0, bias=True)
classifier.weight.data.normal_(0, 0.01)
classifier.bias.data.zero_()
return classifier
def feature_flatten(feats):
if len(feats.size()) == 2:
# feature already flattened.
return feats
feats = feats.view(feats.size(0), feats.size(1), -1).transpose(2, 1)\
.contiguous().view(-1, feats.size(1))
return feats
################################################################################
# Faiss related #
################################################################################
def get_faiss_module(args):
res = faiss.StandardGpuResources()
cfg = faiss.GpuIndexFlatConfig()
cfg.useFloat16 = False
cfg.device = 0 #NOTE: Single GPU only.
idx = faiss.GpuIndexFlatL2(res, args.in_dim, cfg)
return idx
def get_init_centroids(args, K, featlist, index):
clus = faiss.Clustering(args.in_dim, K)
clus.seed = np.random.randint(args.seed)
clus.niter = args.kmeans_n_iter
clus.max_points_per_centroid = 10000000
clus.train(featlist, index)
return faiss.vector_float_to_array(clus.centroids).reshape(K, args.in_dim)
def module_update_centroids(index, centroids):
index.reset()
index.add(centroids)
return index
def fix_seed_for_reproducability(seed):
"""
Unfortunately, backward() of [interpolate] functional seems to be never deterministic.
Below are related threads:
https://github.com/pytorch/pytorch/issues/7068
https://discuss.pytorch.org/t/non-deterministic-behavior-of-pytorch-upsample-interpolate/42842?u=sbelharbi
"""
# Use random seed.
random.seed(seed)
os.environ['PYTHONHASHSEED'] = str(seed)
np.random.seed(seed)
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
torch.cuda.manual_seed_all(seed)
cudnn.deterministic = True
cudnn.benchmark = False
def worker_init_fn(seed):
return lambda x: np.random.seed(seed + x)
################################################################################
# Training Pipelines #
################################################################################
def postprocess_label(args, K, idx, idx_img, scores, n_dual):
out = scores[idx].topk(1, dim=0)[1].flatten().detach().cpu().numpy()
# Save labels.
if not os.path.exists(os.path.join(args.save_model_path, 'label_' + str(n_dual))):
os.makedirs(os.path.join(args.save_model_path, 'label_' + str(n_dual)))
torch.save(out, os.path.join(args.save_model_path, 'label_' + str(n_dual), '{}.pkl'.format(idx_img)))
# Count for re-weighting.
counts = torch.tensor(np.bincount(out, minlength=K)).float()
return counts
def eqv_transform_if_needed(args, dataloader, indice, input):
if args.equiv:
input = dataloader.dataset.transform_eqv(indice, input)
return input
def get_transform_params(args):
inv_list = []
eqv_list = []
if args.augment:
if args.blur:
inv_list.append('blur')
if args.grey:
inv_list.append('grey')
if args.jitter:
inv_list.extend(['brightness', 'contrast', 'saturation', 'hue'])
if args.equiv:
if args.h_flip:
eqv_list.append('h_flip')
if args.v_flip:
eqv_list.append('v_flip')
if args.random_crop:
eqv_list.append('random_crop')
return inv_list, eqv_list
def collate_train(batch):
if batch[0][-1] is not None:
indice = [b[0] for b in batch]
image1 = torch.stack([b[1] for b in batch])
image2 = torch.stack([b[2] for b in batch])
label1 = torch.stack([b[3] for b in batch])
label2 = torch.stack([b[4] for b in batch])
return indice, image1, image2, label1, label2
indice = [b[0] for b in batch]
image1 = torch.stack([b[1] for b in batch])
return indice, image1
def collate_eval(batch):
indice = [b[0] for b in batch]
image = torch.stack([b[1] for b in batch])
label = torch.stack([b[2] for b in batch])
return indice, image, label
def collate_train_baseline(batch):
if batch[0][-1] is not None:
return collate_eval(batch)
indice = [b[0] for b in batch]
image = torch.stack([b[1] for b in batch])
return indice, image | 31.152542 | 111 | 0.569206 | 774 | 0.084222 | 0 | 0 | 0 | 0 | 0 | 0 | 2,209 | 0.24037 |
2df9e5faeeb7d475c0095425ade4eae6bf0cbee6 | 7,009 | py | Python | rif_template.py | EngRaff92/RDL_REG_GEN | 1da36a247552217d009b41b035ddda742ad2aa3e | [
"MIT"
] | 2 | 2022-01-11T19:22:16.000Z | 2022-01-11T20:19:55.000Z | rif_template.py | EngRaff92/RDL_REG_GEN | 1da36a247552217d009b41b035ddda742ad2aa3e | [
"MIT"
] | null | null | null | rif_template.py | EngRaff92/RDL_REG_GEN | 1da36a247552217d009b41b035ddda742ad2aa3e | [
"MIT"
] | null | null | null | header = """/*
Icebreaker and IceSugar RSMB5 project - RV32I for Lattice iCE40
With complete open-source toolchain flow using:
-> yosys
-> icarus verilog
-> icestorm project
Tests are written in several languages
-> Systemverilog Pure Testbench (Vivado)
-> UVM testbench (Vivado)
-> PyUvm (Icarus)
-> Formal either using SVA and PSL (Vivado) or cuncurrent assertions with Yosys
Copyright (c) 2021 Raffaele Signoriello (raff.signoriello92@gmail.com)
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/*
This file contains register parameters and is autogenerated
*/
"""
sv_inclusion = """
`ifndef COCOTB_SIM
// Main Inclusion
`else
// Main Inclusion
`endif
"""
module_name_param = """
// Main Module
module gen_rif #(
// Parameter Declaration
parameter REG_WIDTH = 32,
parameter ERROUT_IF_NOT_ACCESS = 1
)"""
standard_rif_input_ports = """
(
// Port Declaration
// General RIF input port
input logic rif_clk, // Clock
input logic rif_arst, // Asynchronous reset active high
input logic rif_write, // If 0 -> Read if 1 -> Write
input logic rif_cs, // States if the slave has been properly selected
input logic [REG_WIDTH-1:0] rif_addr, // Address coming into the bus
input logic [REG_WIDTH-1:0] rif_wdata, // Write Data coming into the bus"""
hw_write_template_port = """
input logic [REG_WIDTH-1:0] $input_port_hw_rw_access_name,"""
hw_read_template_port = """
output logic [REG_WIDTH-1:0] $output_port_hw_rw_access_name,"""
standard_rif_output_ports = """
// General RIF output ports
output logic [REG_WIDTH-1:0] rif_rdata, // Read Data coming out the bus
output logic rif_error, // Give error in few specific conditions only
output logic rif_ready // Is controlled by the slave and claims if the specifc slave is busy or not
);"""
set_of_decoder_flags = """
logic $dec_val;"""
set_register = """
logic [REG_WIDTH-1:0] $reg_rw;"""
internal_additional_signals = """
// Register Access Process
logic error_handler, error_access;
logic wr_rq, rd_rq;
// Register decoder we are addressing 1Word at time so remove the first 2 bits
logic [REG_WIDTH-1:0] reg_dec, reg_dec_dly;"""
internal_decoder_signals_generation = """
assign reg_dec = rif_addr >> 2;
always_ff@(posedge rif_clk or posedge rif_arst) begin
if(rif_arst) reg_dec_dly <= 'h0;
else reg_dec_dly <= reg_dec;
end"""
internal_wr_rd_request = """
// Assign the WR_REQUEST and RD_REQUEST
assign wr_rq = rif_write & rif_cs;
assign rd_rq = ~rif_write & rif_cs;
// Register the request to be used for the READY signal
logic [1:0] regsistered_request;
always_ff @(posedge rif_clk or posedge rif_arst) begin : request_reg
if(rif_arst) begin
regsistered_request <= 2'b11;
end else begin
// Regardless of the read of write request we have to register the CS
regsistered_request[0] <= (~rif_cs);
regsistered_request[1] <= regsistered_request[0];
end
end
"""
initialize_decoder_state = """
// Address decoding with full combo logic
always_comb begin: addres_decoding
// Initialize
error_access = 1'b0;"""
init_dec_access = """
$dec_val = 1'b0;"""
case_switch_over_address = """
// Select using the address
case (rif_addr)"""
selection = """
$define_name: begin $dec_val = 1'b1; end"""
defualt_end_case = """
default: begin
if(ERROUT_IF_NOT_ACCESS) error_access = 1'b1;
else error_access = 1'b0;
end
endcase // Endcase
end // addres_decoding
"""
initialize_write_decoder_std = """
// Register write access
always_ff @(posedge rif_clk or posedge rif_arst) begin : proc_reg_write_access
if(rif_arst) begin
rif_rdata <= 'h0;"""
initialize_write_decoder_init_start = """
$reg_name <= $reset_val; """
initialize_write_decoder_init_end = """
end
else begin: reg_write_decoder"""
register_write_decoder_start = """
// Logic for HW = R and SW = RW
if($dec_val) begin
if(wr_rq) begin
$reg_name <= rif_wdata & $sw_write_mask;
end
end"""
register_write_decoder_end = """
end // proc_reg_write_access
"""
errorr_handler_logic_start = """
// check the error using COMBO logic to fire an error if RD happens on a RO register
always_comb begin: read_process_error_handle"""
errorr_handler_logic = """
// Logic for HW = W and SW = RO
if($dec_val) begin
if(wr_rq) begin
error_handler = 1'b1;
end
else if(rd_rq) begin
rif_rdata = $read_reg & $sw_read_mask;
error_handler = 1'b0;
end
end"""
errorr_handler_logic_end = """
end // read_process_error_handle
"""
errorr_handler_write_logic_start = """
// check the error using COMBO logic to fire an error if RD happens on a WO register
always_comb begin: write_process_error_handle"""
errorr_handler_write_logic = """
// Logic for HW = R and SW = WO
if($dec_val) begin
if(rd_rq) begin
error_handler = 1'b1;
rif_rdata = 'h0'
end
else begin
error_handler = 1'b0;
end
end"""
errorr_handler_write_logic_end = """
end // write_process_error_handle
"""
internal_latest_assignement = """
// assign the Error output
assign rif_error = rif_cs ? (error_handler | error_access) : 'h0;
// Assign the ready signal
assign rif_ready = &(regsistered_request);
"""
assign_for_hw_read_policy_reg = """
assign $out_port = rif_cs ? ($reg_name & $hw_read_mask) : 'h0;"""
assign_for_hw_write_policy_reg = """
assign $reg_name = $in_port & $hw_write_mask;"""
end_module_rif = """
endmodule : gen_rif""" | 31.151111 | 119 | 0.66329 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6,110 | 0.871736 |
2dfab9e21824f933194ff41412afd5eaebd5cb04 | 438 | py | Python | reader.py | Asylumrunner/FeedingFrenzy | 7f0d96f8361cb2c37cd41d06345e6ec0aa721472 | [
"MIT"
] | 1 | 2020-12-08T13:09:38.000Z | 2020-12-08T13:09:38.000Z | reader.py | weisisheng/FeedingFrenzy | 7f0d96f8361cb2c37cd41d06345e6ec0aa721472 | [
"MIT"
] | null | null | null | reader.py | weisisheng/FeedingFrenzy | 7f0d96f8361cb2c37cd41d06345e6ec0aa721472 | [
"MIT"
] | 1 | 2020-12-08T13:09:39.000Z | 2020-12-08T13:09:39.000Z | import feedparser
def read_rss_feed(feed_url):
feed = feedparser.parse(feed_url)
return [trim_entry(entry) for entry in feed.entries]
def trim_entry(entry):
return {
'date': "{}/{}/{}".format(entry.published_parsed.tm_year, entry.published_parsed.tm_mon, entry.published_parsed.tm_mday),
'title': entry.title,
'link': entry.link,
'author': entry.author,
'summary': entry.summary
} | 31.285714 | 129 | 0.6621 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 46 | 0.105023 |
2dfac8a859dbed739d1b56982bcc545aec26ba63 | 9,229 | py | Python | modmail/config.py | fossabot/modmail-1 | ffbc3f981efb455f920e8c9f52657fbc2b802816 | [
"MIT"
] | null | null | null | modmail/config.py | fossabot/modmail-1 | ffbc3f981efb455f920e8c9f52657fbc2b802816 | [
"MIT"
] | null | null | null | modmail/config.py | fossabot/modmail-1 | ffbc3f981efb455f920e8c9f52657fbc2b802816 | [
"MIT"
] | null | null | null | import asyncio
import datetime
import json
import logging
import os
import sys
import typing
from pathlib import Path
from typing import Any, Dict, Optional, Tuple
import discord
import toml
from discord.ext.commands import BadArgument
from pydantic import BaseModel
from pydantic import BaseSettings as PydanticBaseSettings
from pydantic import Field, SecretStr
from pydantic.env_settings import SettingsSourceCallable
from pydantic.types import conint
log = logging.getLogger(__name__)
CONFIG_PATHS: list = [
f"{os.getcwd()}/config.toml",
f"{os.getcwd()}/modmail/config.toml",
"./config.toml",
]
DEFAULT_CONFIG_PATHS = [os.path.join(os.path.dirname(__file__), "config-default.toml")]
def determine_file_path(
paths=typing.Union[list, tuple], config_type: str = "default"
) -> typing.Union[str, None]:
path = None
for file_path in paths:
config_file = Path(file_path)
if (config_file).exists():
path = config_file
log.debug(f"Found {config_type} config at {file_path}")
break
return path or None
DEFAULT_CONFIG_PATH = determine_file_path(DEFAULT_CONFIG_PATHS)
USER_CONFIG_PATH = determine_file_path(CONFIG_PATHS, config_type="")
def toml_default_config_source(settings: PydanticBaseSettings) -> Dict[str, Any]:
"""
A simple settings source that loads variables from a toml file
from within the module's source folder.
Here we happen to choose to use the `env_file_encoding` from Config
when reading `config-default.toml`
"""
return dict(**toml.load(DEFAULT_CONFIG_PATH))
def toml_user_config_source(settings: PydanticBaseSettings) -> Dict[str, Any]:
"""
A simple settings source that loads variables from a toml file
from within the module's source folder.
Here we happen to choose to use the `env_file_encoding` from Config
when reading `config-default.toml`
"""
if USER_CONFIG_PATH:
return dict(**toml.load(USER_CONFIG_PATH))
else:
return dict()
class BaseSettings(PydanticBaseSettings):
class Config:
extra = "ignore"
env_file = ".env"
env_file_encoding = "utf-8"
@classmethod
def customise_sources(
cls,
init_settings: SettingsSourceCallable,
env_settings: SettingsSourceCallable,
file_secret_settings: SettingsSourceCallable,
) -> Tuple[SettingsSourceCallable, ...]:
return (
env_settings,
init_settings,
file_secret_settings,
toml_user_config_source,
toml_default_config_source,
)
class ThreadBaseSettings(BaseSettings):
class Config:
env_prefix = "thread."
# @classmethod
# def alias_generator(cls, string: str) -> str:
# return f"thread.{super.__name__}.{string}"
class BotActivityConfig(BaseSettings):
twitch_url: str = "https://www.twitch.tv/discordmodmail/"
class BotConfig(BaseSettings):
prefix: str = "?"
activity: BotActivityConfig
token: str = None
modmail_guild_id: str = None
guild_id: str = None
multi_bot: bool = False
log_url: str = None
log_url_prefix = "/"
github_token: SecretStr = None
database_type: str = "mongodb" # TODO limit to specific strings
enable_plugins: bool = True
enable_eval: bool = True
data_collection = True
owners: str = 1
connection_uri: str = None
level_permissions: dict = None
class Config:
# env_prefix = "bot."
allow_mutation = False
class ColorsConfig(BaseSettings):
main_color: str = str(discord.Colour.blurple())
error_color: str = str(discord.Colour.red())
recipient_color: str = str(discord.Colour.green())
mod_color: str = str(discord.Colour.blue())
class ChannelConfig(BaseSettings):
# all of the below should be validated to channels
# either by name or by int
main_category: str = None
fallback_category: str = None
log_channel: str = None
mention_channel: str = None
update_channel: str = None
class DevConfig(BaseSettings):
"""
Developer specific configuration.
These settings should not be changed unless you know what you're doing.
"""
log_level: conint(ge=0, le=50) = getattr(logging, "NOTICE", 25)
class EmojiConfig(BaseSettings):
"""
Standard emojis that the bot uses when a specific emoji is not defined for a specific use.
"""
sent_emoji: str = "\\N{WHITE HEAVY CHECK MARK}" # TODO type as a discord emoji
blocked_emoji: str = "\\N{NO ENTRY SIGN}" # TODO type as a discord emoji
class InternalConfig(BaseModel):
# do NOT set these yourself. The bot will handle these
activity_message: str = None
activity_type: None = None
status: None = None
dm_disabled: int = 0
# moderation
blocked: dict = dict()
blocked_roles: dict = dict()
blocked_whitelist: list = dict()
command_permissions: dict = dict()
level_permissions: dict = dict()
override_command_level: dict = dict()
# threads
snippets: dict = dict()
notifications: dict = dict()
subscriptions: dict = dict()
closures: dict = dict()
# misc
plugins: list = list()
aliases: dict = dict()
auto_triggers: dict = dict()
command_permissions: dict = dict()
level_permissions: dict = dict()
class Config:
arbitrary_types_allowed = True
class MentionConfig(BaseSettings):
alert_on_mention: bool = False
silent_alert_on_mention: bool = False
mention_channel: int = None
class SnippetConfig(BaseSettings):
anonmous_snippets: bool = False
use_regex_autotrigger: bool = False
class ThreadAnonConfig(ThreadBaseSettings):
username: str = "Response"
footer: str = "Staff Team"
class ThreadAutoCloseConfig(ThreadBaseSettings):
time: datetime.timedelta = 0
silently: bool = False
response: str = "This thread has been closed automatically due to inactivity after {timeout}."
class ThreadCloseConfig(ThreadBaseSettings):
footer: str = "Replying will create a new thread"
title: str = "Thread Closed"
response: str = "{closer.mention} has closed this Modmail thread."
on_leave: bool = False
on_leave_reason: str = "The recipient has left the server."
self_close_response: str = "You have closed this Modmail thread."
class ThreadConfirmCreationConfig(ThreadBaseSettings):
enabled: bool = False
title: str = "Confirm thread creation"
response: str = "React to confirm thread creation which will directly contact the moderators"
accept_emoji: str = "\N{WHITE HEAVY CHECK MARK}" # TODO type as a discord emoji
deny_emoji: str = "\N{NO ENTRY SIGN}" # TODO type as a discord emoji
class ThreadCooldownConfig(ThreadBaseSettings):
time: datetime.timedelta = 0
embed_title: str = "Message not sent!"
response: str = "You must wait for {delta} before you can contact me again."
class ThreadCreationConfig(ThreadBaseSettings):
response: str = "The staff team will get back to you as soon as possible."
footer: str = "Your message has been sent"
title: str = "Thread Created"
class ThreadDisabledConfig(ThreadBaseSettings):
new_title: str = "Not Delivered"
new_response: str = "We are not accepting new threads."
new_footer: str = "Please try again later..."
current_title: str = "Not Delivered"
current_response: str = "We are not accepting any messages."
current_footer: str = "Please try again later..."
class ThreadMoveConfig(ThreadBaseSettings):
title: str = "Thread Moved"
notify: bool = False
notify_mods: bool = False
response: str = "This thread has been moved."
class ThreadSelfClosableConfig(ThreadBaseSettings):
enabled: bool = False
lock_emoji: str = "\N{LOCK}"
creation_footer: str = "Click the lock to close the thread"
class ThreadConfig(BaseSettings):
anon_reply_without_command: bool = False
reply_without_command: bool = False
plain_reply_without_command: bool = False
mention: str = "@here"
user_typing: bool = False
mod_typing: bool = False
transfer_reactions: bool = True
contact_silently: bool = False
account_age: datetime.timedelta = 0
guild_age: datetime.timedelta = 0
mod_tag: str = ""
show_timestamp: bool = True
anon: ThreadAnonConfig
auto_close: ThreadAutoCloseConfig
close: ThreadCloseConfig
confirm_creation: ThreadConfirmCreationConfig
cooldown: ThreadCooldownConfig
creation: ThreadCreationConfig
disabled: ThreadDisabledConfig
move: ThreadMoveConfig
self_closable: ThreadSelfClosableConfig
class UpdateConfig(BaseSettings):
disable_autoupdates: bool = False
update_notifications: bool = True
class Config:
allow_mutation = False
env_prefix = "updates."
class ModmailConfig(BaseSettings):
bot: BotConfig
colors: ColorsConfig
channels: ChannelConfig
dev: DevConfig
emoji: EmojiConfig
mention: MentionConfig
snippets: SnippetConfig
thread: ThreadConfig
updates: UpdateConfig
shell: str = None
CONFIG = ModmailConfig()
INTERNAL = InternalConfig()
| 29.113565 | 98 | 0.694983 | 7,086 | 0.767797 | 0 | 0 | 487 | 0.052768 | 0 | 0 | 2,301 | 0.249323 |
2dfc0e1a9a6d36009062ade1fde99b49f19b17a2 | 5,851 | py | Python | turnip/prots.py | RuthAngus/turnip | 83aa899b6f35e709cd8a63c5395ff8e070a91bb9 | [
"MIT"
] | null | null | null | turnip/prots.py | RuthAngus/turnip | 83aa899b6f35e709cd8a63c5395ff8e070a91bb9 | [
"MIT"
] | null | null | null | turnip/prots.py | RuthAngus/turnip | 83aa899b6f35e709cd8a63c5395ff8e070a91bb9 | [
"MIT"
] | null | null | null | # plot rotation period vs orbital period
import os
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import glob
import re
from gyro import gyro_age
import teff_bv as tbv
import scipy.stats as sps
from calc_completeness import calc_comp
# np.set_printoptions(threshold=np.nan, linewidth=9)
# Matplotlib rcParams applied to every figure produced by this module:
# LaTeX text rendering plus uniform label/tick/legend font sizes.
# NOTE(review): 'text.fontsize' is not a valid rcParam on modern matplotlib
# (it was replaced by 'font.size'); rcParams.update may raise KeyError on
# recent versions -- confirm the pinned matplotlib version.
plotpar = {'axes.labelsize': 18,
           'text.fontsize': 10,
           'legend.fontsize': 18,
           'xtick.labelsize': 18,
           'ytick.labelsize': 18,
           'text.usetex': True}
plt.rcParams.update(plotpar)

# Root directory holding the input catalogues and per-KOI MCMC result files.
DATA_DIR = "/Users/ruthangus/projects/turnip/turnip/data/"
def save_data(nbins):
    """Compute point-estimate rotation periods from the per-KOI posteriors.

    For each ``*.h5`` sample file under ``DATA_DIR/koi_results``, the left
    edge of the most populated bin of an ``nbins``-bin histogram of
    ln(period) is taken as the posterior mode, and the 16th/84th percentiles
    give the lower/upper uncertainties.  Results are written to
    ``period_point_estimates.csv`` in the current directory.

    Args:
        nbins (int): number of histogram bins used to locate the posterior
            mode of ln(period).
    """
    # NB: os.path.join discards every component before an absolute one, so
    # the pattern must not start with "/" (the original
    # "/koi_results/*h5" silently dropped DATA_DIR and globbed the
    # filesystem root instead).
    fnames = glob.glob(os.path.join(DATA_DIR, "koi_results", "*h5"))
    koi, period, errp, errm, lnerrp, lnerrm = [], [], [], [], [], []
    for i, fn in enumerate(fnames):
        df = pd.read_hdf(fn, key="samples")
        phist, bins = np.histogram(df.ln_period.values, nbins)
        # Left edge of the most populated bin = posterior mode of ln(period).
        # argmax replaces `bins[phist == max(phist)][0]`: bins has nbins+1
        # edges, so boolean-indexing it with the nbins-long mask raises an
        # IndexError on modern numpy; both forms pick the first maximal bin.
        ln_p = bins[np.argmax(phist)]
        period.append(np.exp(ln_p))
        lnerrp.append(np.percentile(df.ln_period.values, 84) - ln_p)
        lnerrm.append(ln_p - np.percentile(df.ln_period.values, 16))
        errp.append(np.exp(lnerrp[i]/ln_p))
        errm.append(np.exp(lnerrm[i]/ln_p))
        # The KOI number is the first run of digits in the file name.
        koi.append(re.findall(r'\d+', fn)[0])
    table = pd.DataFrame({"koi": np.array(koi), "period": np.array(period),
                          "errp": np.array(errp), "errm": np.array(errm)})
    table.to_csv("period_point_estimates.csv")
def make_histogram():
    """Plot and save a histogram of the point-estimate rotation periods.

    Reads ``period_point_estimates.csv`` from the current directory and
    writes the figure to ``gp_period_hist``.
    """
    estimates = pd.read_csv("period_point_estimates.csv")
    plt.clf()
    plt.hist(estimates.period, 20)
    plt.savefig("gp_period_hist")
def make_df():
    """Merge the rotation-period point estimates with the KOI catalogue.

    Joins ``period_point_estimates.csv`` with the cumulative KOI table
    (``DATA_DIR/cumulative.csv``, header comment lines skipped) on integer
    KOI number and writes the merged table to ``planet_periods.csv``.
    """
    df = pd.read_csv("period_point_estimates.csv")
    planets = pd.read_csv(os.path.join(DATA_DIR, "cumulative.csv"),
                          skiprows=155)
    # Extract the integer KOI number from names like "K00042.01": the first
    # run of digits is the KOI, the part after the dot is the planet index.
    planets["koi"] = [int(re.findall(r'\d+', name)[0])
                      for name in planets.kepoi_name.values]
    joint = pd.merge(planets, df, on="koi")
    joint.to_csv("planet_periods.csv")
def plot_periods():
    """Plot rotation period against orbital period for the KOI sample.

    Reads ``planet_periods.csv``, keeps only stars with credible rotation
    periods (ln P_rot > 1), and saves a log-log scatter plot coloured by
    effective temperature and sized by ln(planet radius) to
    ``period_period``.  Also prints the KOIs with anomalously short
    rotation periods (ln P_rot < 1).
    """
    df = pd.read_csv("planet_periods.csv")

    # Keep only credible rotation periods: ln(P_rot) > 1, i.e. P_rot > e days.
    m = np.log(df.period.values) > 1
    porb = df.koi_period.values[m]
    prot = df.period.values[m]
    radius = np.log(df.koi_prad.values[m])
    teff = df.koi_steff.values[m]

    plt.clf()
    plt.scatter(porb, prot, s=5*radius, c=teff, vmin=4400, vmax=7000)
    plt.loglog()
    plt.colorbar()
    plt.xlabel("$\ln(\mathrm{Orbital~period})$")
    plt.ylabel("$\ln(\mathrm{Rotation~period})$")
    plt.subplots_adjust(bottom=.15)
    plt.savefig("period_period")

    # Report the anomalously fast rotators for follow-up.
    m = np.log(df.period.values) < 1
    print(df.koi.values[m])
def plot_radii():
df = pd.read_csv("planet_periods.csv")
m = np.log(df.period.values) > 1
prot = df.period.values[m]
radius = np.log(df.koi_prad.values[m])
teff = df.koi_steff.values[m]
logg = df.koi_slogg.values[m]
feh = np.zeros(len(logg))
gyro = gyro_age(prot, teff, feh, logg)
age = gyro.barnes07("mh")
m = np.isfinite(age)
plt.clf()
plt.scatter(np.log(age[m]), np.log(radius[m]), c=teff[m], s=10, vmin=4400,
vmax=7000)
plt.colorbar()
plt.xlabel("$\ln(\mathrm{Age,~Gyr})$")
plt.ylabel("$\ln(\mathrm{Radius}, R_J)$")
plt.subplots_adjust(bottom=.15)
plt.savefig("age_radius")
l = age[m] < 3.295
print(len(radius[m][l]))
print(len(radius[m][~l]))
plt.clf()
plt.hist(radius[m][l], 50, normed=True, alpha=.5, label="young")
plt.hist(radius[m][~l], 40, normed=True, alpha=.5, label="old")
plt.legend()
plt.xlabel("Radius")
plt.savefig("radius_hist")
print(sps.ks_2samp(radius[m][l], radius[m][~l]))
cum_young = np.cumsum(radius[m][l]) / sum(radius[m][l])
cum_old = np.cumsum(radius[m][~l]) / sum(radius[m][~l])
plt.clf()
plt.plot(cum_young, label="young")
plt.plot(cum_old, label="old")
plt.savefig("radius_cdf")
# # print(np.unique(df.kepid.values[m]))
# for i in np.unique(df.kepid.values[m]):
# print("KIC", str(int(i)).zfill(9))
n = radius[m][l] < .5
n2 = radius[m][~l] < .5
print(len(radius[m][l][n]))
print(len(radius[m][~l][n2]))
plt.clf()
plt.hist(radius[m][l][n], 50, normed=True, alpha=.5, label="young")
plt.hist(radius[m][~l][n2], 40, normed=True, alpha=.5, label="old")
plt.legend()
plt.xlabel("Radius")
plt.savefig("radius_hist_hj")
print(sps.ks_2samp(radius[m][l][n], radius[m][~l][n2]))
n = radius[m] < .5
plt.clf()
plt.scatter(np.log(age[m][n]), np.log(radius[m][n]), c=teff[m][n], s=10,
vmin=4400, vmax=7000)
plt.colorbar()
plt.xlabel("$\ln(\mathrm{Age,~Gyr})$")
plt.ylabel("$\ln(\mathrm{Radius}, R_J)$")
plt.subplots_adjust(bottom=.15)
plt.savefig("age_radius_hj")
def plot_completeness():
df = pd.read_csv("planet_periods.csv")
comp = np.zeros((len(df.kepid.values)))
print(df.kepid.values[:10])
for i, kepid in enumerate(df.kepid.values[:10]):
print(i, "of", len(df.kepid.values))
print("id = ", kepid)
comp[i] = calc_comp(kepid, 365.25, 1.)
print(comp[i])
df["probtot"] = comp
plt.clf()
plt.plot(comp[:10], df.period.values[:10], "k.")
plt.savefig("comp_vs_period")
if __name__ == "__main__":
# save_data(100)
# make_histogram()
# make_df()
# plot_periods()
# plot_radii()
plot_completeness()
| 30.633508 | 78 | 0.604683 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,338 | 0.228679 |
2dfc27f1da91e13bb0050491db5539cdfb26d239 | 9,911 | py | Python | proper_mod/prop_dm.py | RupertDodkins/medis | bdb1f00fb93506da2a1f251bc6780e70e97a16c5 | [
"MIT"
] | 1 | 2021-06-25T17:35:56.000Z | 2021-06-25T17:35:56.000Z | proper_mod/prop_dm.py | RupertDodkins/medis | bdb1f00fb93506da2a1f251bc6780e70e97a16c5 | [
"MIT"
] | null | null | null | proper_mod/prop_dm.py | RupertDodkins/medis | bdb1f00fb93506da2a1f251bc6780e70e97a16c5 | [
"MIT"
] | 2 | 2018-12-08T15:05:13.000Z | 2019-08-08T17:28:24.000Z | # Copyright 2016, 2017 California Institute of Technology
# Users must agree to abide by the restrictions listed in the
# file "LegalStuff.txt" in the PROPER library directory.
#
# PROPER developed at Jet Propulsion Laboratory/California Inst. Technology
# Original IDL version by John Krist
# Python translation by Navtej Saini, with Luis Marchen and Nikta Amiri
#
# Revised 5 March 2018 - John Krist - Fixed call to prop_cubic_conv by
# getting rid of the flattening of the coordinate arrays.
import os
import proper
import numpy as np
from math import sin, cos
# from . import lib_dir
lib_dir = os.path.dirname(proper.__file__)
import scipy.signal as ss
if not proper.use_cubic_conv:
from scipy.ndimage.interpolation import map_coordinates
def prop_dm(wf, dm_z0, dm_xc, dm_yc, spacing = 0., **kwargs):
"""Simulate a deformable mirror of specified actuator spacing, including the
effects of the DM influence function.
Parameters
----------
wf : obj
WaveFront class object
dm_z0 : str or numpy ndarray
Either a 2D numpy array containing the surface piston of each DM
actuator in meters or the name of a 2D FITS image file containing the
above
dm_xc, dm_yc : list or numpy ndarray
The location of the optical axis (center of the wavefront) on the DM in
actuator units (0 ro num_actuator-1). The center of the first actuator
is (0.0, 0.0)
spacing : float
Defines the spacing in meters between actuators; must not be used when
n_act_across_pupil is specified.
Returns
-------
dmap : numpy ndarray
Returns DM surface (not wavefront) map in meters
Other Parameters
----------------
FIT : bool
Switch that tells routine that the values in "dm_z" are the desired
surface heights rather than commanded actuator heights, and so the
routine should fit this map, accounting for actuator influence functions,
to determine the necessary actuator heights. An iterative error-minimizing
loop is used for the fit.
NO_APPLY : bool
If set, the DM pattern is not added to the wavefront. Useful if the DM
surface map is needed but should not be applied to the wavefront
N_ACT_ACROSS_PUPIL : int
Specifies the number of actuators that span the X-axis beam diameter. If
it is a whole number, the left edge of the left pixel is aligned with
the left edge of the beam, and the right edge of the right pixel with
the right edge of the beam. This determines the spacing and size of the
actuators. Should not be used when "spacing" value is specified.
XTILT, YTILT, ZTILT : float
Specify the rotation of the DM surface with respect to the wavefront plane
in degrees about the X, Y, Z axes, respectively, with the origin at the
center of the wavefront. The DM surface is interpolated and orthographically
projected onto the wavefront grid. The coordinate system assumes that
the wavefront and initial DM surface are in the X,Y plane with a lower
left origin with Z towards the observer. The rotations are left handed.
The default rotation order is X, Y, then Z unless the /ZYX switch is set.
XYZ or ZYX : bool
Specifies the rotation order if two or more of XTILT, YTILT, or ZTILT
are specified. The default is /XYZ for X, Y, then Z rotations.
Raises
------
ValueError:
User cannot specify both actuator spacing and N_ACT_ACROSS_PUPIL
ValueError:
User must specify either actuator spacing or N_ACT_ACROSS_PUPIL
"""
if "ZYX" in kwargs and "XYZ" in kwargs:
raise ValueError('PROP_DM: Error: Cannot specify both XYZ and ZYX rotation orders. Stopping')
elif not "ZYX" in kwargs and not 'XYZ' in kwargs:
XYZ = 1 # default is rotation around X, then Y, then Z
ZYX = 0
elif "ZYX" in kwargs:
ZYX = 1
XYZ = 0
elif "XYZ" in kwargs:
XYZ = 1
ZYX = 0
if "XTILT" in kwargs:
xtilt = kwargs["XTILT"]
else:
xtilt = 0.
if "YTILT" in kwargs:
ytilt = kwargs["YTILT"]
else:
ytilt = 0.
if "ZTILT" in kwargs:
ztilt = kwargs["ZTILT"]
else:
ztilt = 0.
if type(dm_z0) == str:
dm_z = proper.prop_fits_read(dm_z0) # Read DM setting from FITS file
else:
dm_z = dm_z0
n = proper.prop_get_gridsize(wf)
dx_surf = proper.prop_get_sampling(wf) # sampling of current surface in meters
beamradius = proper.prop_get_beamradius(wf)
# influence function sampling is 0.1 mm, peak at (x,y)=(45,45)
# Influence function has shape = 1x91x91. Saving it as a 2D array
# before continuing with processing
inf = proper.prop_fits_read(os.path.join(lib_dir, "influence_dm5v2.fits"))
inf = inf[0,:,:]
s = inf.shape
nx_inf = s[1]
ny_inf = s[0]
xc_inf = int(nx_inf/2)
yc_inf = int(ny_inf/2)
dx_inf = 0.1e-3 # influence function spacing in meters
dx_dm_inf = 1.e-3 # spacing between DM actuators in meters assumed by influence function
inf_mag = 10
if spacing != 0 and "N_ACT_ACROSS_PUPIL" in kwargs:
raise ValueError("PROP_DM: User cannot specify both actuator spacing and N_ACT_ACROSS_PUPIL. Stopping.")
if spacing == 0 and not "N_ACT_ACROSS_PUPIL" in kwargs:
raise ValueError("PROP_DM: User must specify either actuator spacing or N_ACT_ACROSS_PUPIL. Stopping.")
if "N_ACT_ACROSS_PUPIL" in kwargs:
dx_dm = 2. * beamradius / int(kwargs["N_ACT_ACROSS_PUPIL"])
else:
dx_dm = spacing
dx_inf = dx_inf * dx_dm / dx_dm_inf # Influence function sampling scaled
# to specified DM actuator spacing
if "FIT" in kwargs:
x = (np.arange(5, dtype = np.float64) - 2) * dx_dm
if proper.use_cubic_conv:
inf_kernel = proper.prop_cubic_conv(inf.T, x/dx_inf+xc_inf, x/dx_inf+yc_inf, GRID=True)
else:
xygrid = np.meshgrid(x/dx_inf+xc_inf, x/dx_inf+yc_inf)
inf_kernel = map_coordinates(inf.T, xygrid, order = 3, mode = "nearest")
(dm_z_commanded, dms) = proper.prop_fit_dm(dm_z, inf_kernel)
else:
dm_z_commanded = dm_z
s = dm_z.shape
nx_dm = s[1]
ny_dm = s[0]
# Create subsampled DM grid
margin = 9 * inf_mag
nx_grid = nx_dm * inf_mag + 2 * margin
ny_grid = ny_dm * inf_mag + 2 * margin
xoff_grid = margin + inf_mag/2 # pixel location of 1st actuator center in subsampled grid
yoff_grid = xoff_grid
dm_grid = np.zeros([ny_grid, nx_grid], dtype = np.float64)
x = np.arange(nx_dm, dtype = np.int16) * int(inf_mag) + int(xoff_grid)
y = np.arange(ny_dm, dtype = np.int16) * int(inf_mag) + int(yoff_grid)
dm_grid[np.tile(np.vstack(y), (nx_dm,)), np.tile(x, (ny_dm,1))] = dm_z_commanded
dm_grid = ss.fftconvolve(dm_grid, inf, mode = 'same')
# 3D rotate DM grid and project orthogonally onto wavefront
xdim = int(np.round(np.sqrt(2) * nx_grid * dx_inf / dx_surf)) # grid dimensions (pix) projected onto wavefront
ydim = int(np.round(np.sqrt(2) * ny_grid * dx_inf / dx_surf))
if xdim > n: xdim = n
if ydim > n: ydim = n
x = np.ones((ydim,1), dtype = np.int) * ((np.arange(xdim) - int(xdim/2)) * dx_surf)
y = (np.ones((xdim,1), dtype = np.int) * ((np.arange(ydim) - int(ydim/2)) * dx_surf)).T
a = xtilt * np.pi / 180
b = ytilt * np.pi / 180
g = ztilt * np.pi /180
if XYZ:
m = np.array([ [cos(b)*cos(g), -cos(b)*sin(g), sin(b), 0],
[cos(a)*sin(g) + sin(a)*sin(b)*cos(g), cos(a)*cos(g)-sin(a)*sin(b)*sin(g), -sin(a)*cos(b), 0],
[sin(a)*sin(g)-cos(a)*sin(b)*cos(g), sin(a)*cos(g)+cos(a)*sin(b)*sin(g), cos(a)*cos(b), 0],
[0, 0, 0, 1] ])
else:
m = np.array([ [cos(b)*cos(g), cos(g)*sin(a)*sin(b)-cos(a)*sin(g), cos(a)*cos(g)*sin(b)+sin(a)*sin(g), 0],
[cos(b)*sin(g), cos(a)*cos(g)+sin(a)*sin(b)*sin(g), -cos(g)*sin(a)+cos(a)*sin(b)*sin(g), 0],
[-sin(b), cos(b)*sin(a), cos(a)*cos(b), 0],
[0, 0, 0, 1] ])
# Forward project a square
edge = np.array([[-1.0,-1.0,0.0,0.0], [1.0,-1.0,0.0,0.0], [1.0,1.0,0.0,0.0], [-1.0,1.0,0.0,0.0]])
new_xyz = np.dot(edge, m)
# determine backward projection for screen-raster-to-DM-surce computation
dx_dxs = (new_xyz[0,0] - new_xyz[1,0]) / (edge[0,0] - edge[1,0])
dx_dys = (new_xyz[1,0] - new_xyz[2,0]) / (edge[1,1] - edge[2,1])
dy_dxs = (new_xyz[0,1] - new_xyz[1,1]) / (edge[0,0] - edge[1,0])
dy_dys = (new_xyz[1,1] - new_xyz[2,1]) / (edge[1,1] - edge[2,1])
xs = ( x/dx_dxs - y*dx_dys/(dx_dxs*dy_dys) ) / ( 1 - dy_dxs*dx_dys/(dx_dxs*dy_dys) )
ys = ( y/dy_dys - x*dy_dxs/(dx_dxs*dy_dys) ) / ( 1 - dx_dys*dy_dxs/(dx_dxs*dy_dys) )
xdm = (xs + dm_xc * dx_dm) / dx_inf + xoff_grid
ydm = (ys + dm_yc * dx_dm) / dx_inf + yoff_grid
if proper.use_cubic_conv:
grid = proper.prop_cubic_conv(dm_grid.T, xdm, ydm, GRID = False)
grid = grid.reshape([xdm.shape[1], xdm.shape[0]])
else:
grid = map_coordinates(dm_grid.T, [xdm, ydm], order = 3, mode = "nearest", prefilter = True)
dmap = np.zeros([n,n], dtype = np.float64)
nx_grid, ny_grid = grid.shape
xmin, xmax = int(n/2 - xdim/2), int(n/2 - xdim/2 + nx_grid)
ymin, ymax = int(n/2 - ydim/2), int(n/2 - ydim/2 + ny_grid)
dmap[ymin:ymax, xmin:xmax] = grid
# Random dots sometimes appear in the phase map. This is a little temporary hack to deal with that bug!
import scipy.ndimage
sigma = [1, 1]
dmap = scipy.ndimage.filters.gaussian_filter(dmap, sigma, mode='constant')
if not "NO_APPLY" in kwargs:
proper.prop_add_phase(wf, 2 * dmap) # x2 to convert surface to wavefront error
return dmap
| 38.414729 | 114 | 0.634648 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,729 | 0.477147 |
2dfeb124e98b0f347e20cdfa1b942484c6bbcdf7 | 1,571 | py | Python | asap-tools/experiments/depricated/handler/comparative.py | project-asap/Profiler | eaad7eafe3635a2d0881c13069a3ac632784fd3a | [
"Apache-2.0"
] | 3 | 2017-04-18T17:09:29.000Z | 2017-11-08T22:55:32.000Z | asap-tools/experiments/depricated/handler/comparative.py | project-asap/Profiler | eaad7eafe3635a2d0881c13069a3ac632784fd3a | [
"Apache-2.0"
] | 18 | 2016-11-07T10:44:58.000Z | 2017-04-25T12:40:24.000Z | asap-tools/experiments/depricated/handler/comparative.py | project-asap/Profiler | eaad7eafe3635a2d0881c13069a3ac632784fd3a | [
"Apache-2.0"
] | 4 | 2015-12-09T09:09:59.000Z | 2018-05-23T14:29:00.000Z | __author__ = 'cmantas'
from tools import *
# Kmeans mahout vs spark
m_q = """select mahout_kmeans_text.documents/1000, mahout_kmeans_text.time/1000
from mahout_tfidf inner join mahout_kmeans_text
ON
mahout_tfidf.documents=mahout_kmeans_text.documents AND
mahout_tfidf.dimensions=mahout_kmeans_text.dimensions
where minDF=10 and k={};"""
# plot_from_query(m_q.format(20), label="Mahout, k=20", title="K-Means, Mahout vs Spark", xlabel="#docs/1000", ylabel="#terms")
# plot_from_query("select documents/1000, time/1000 from spark_kmeans_text WHERE k=20 and minDF=10", label="Spark, k=20")
## K-means
# k=10; minDF=10
# figure()
# draw_single_kmeans("weka", k, minDF,title="K-Means: WEKA, Mahout, Spark")
# draw_single_kmeans("mahout", k, minDF)
# draw_single_kmeans("spark", k, minDF, where_extra="spark_kmeans_text.documents<130000")
# show()
# exit()
# tfidf
figure()
plot_from_query("select documents/1000, avg(time/1000) from spark_tfidf where minDF=10 and documents<130000 group by documents",
label="Spark TF/IDF", xlabel="#docs/1000", ylabel="time (sec)", title="TF/IDF Performance")
plot_from_query("select documents/1000, time/1000 from mahout_tfidf WHERE minDF=10", label="Mahout, minDF=10")
plot_from_query("select documents/1000, time/1000 from weka_tfidf WHERE minDF=10", label="Mahout, minDF=10")
figure()
plot_from_query("select documents/1000, dimensions/1000 from weka_tfidf where minDF=10", title="doc freq", label="weka")
plot_from_query("select documents/1000, dimensions/1000 from mahout_tfidf where minDF=10", label="mahout")
show()
| 38.317073 | 128 | 0.760025 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,313 | 0.835773 |
2dff63249548afd5c182283e332b2e72f7db6505 | 1,818 | py | Python | docs/sample_code/debugging_info/src/dataset.py | mindspore-ai/docs | e7cbd69fe2bbd7870aa4591510ed3342ec6a3d41 | [
"Apache-2.0",
"CC-BY-4.0"
] | 288 | 2020-03-28T07:00:25.000Z | 2021-12-26T14:56:31.000Z | docs/sample_code/debugging_info/src/dataset.py | mindspore-ai/docs | e7cbd69fe2bbd7870aa4591510ed3342ec6a3d41 | [
"Apache-2.0",
"CC-BY-4.0"
] | 1 | 2021-07-21T08:11:58.000Z | 2021-07-21T08:11:58.000Z | docs/sample_code/debugging_info/src/dataset.py | mindspore-ai/docs | e7cbd69fe2bbd7870aa4591510ed3342ec6a3d41 | [
"Apache-2.0",
"CC-BY-4.0"
] | 37 | 2020-03-30T06:38:37.000Z | 2021-09-17T05:47:59.000Z | # Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""dataset
Custom dataset.
"""
import numpy as np
from mindspore import dataset as ds
def get_data(num, img_size=(1, 32, 32), num_classes=10, is_onehot=True):
for _ in range(num):
img = np.random.randn(*img_size)
target = np.random.randint(0, num_classes)
target_ret = np.array([target]).astype(np.float32)
if is_onehot:
target_onehot = np.zeros(shape=(num_classes,))
target_onehot[target] = 1
target_ret = target_onehot.astype(np.float32)
yield img.astype(np.float32), target_ret
def create_train_dataset(num_data=32768, batch_size=32, repeat_size=1):
input_data = ds.GeneratorDataset(list(get_data(num_data)), column_names=['data', 'label'])
input_data = input_data.batch(batch_size, drop_remainder=True)
input_data = input_data.repeat(repeat_size)
return input_data
def create_eval_dataset(num_data=2048, batch_size=2048, repeat_size=1):
input_data = ds.GeneratorDataset(list(get_data(num_data)), column_names=['data', 'label'])
input_data = input_data.batch(batch_size)
input_data = input_data.repeat(repeat_size)
return input_data
| 42.27907 | 94 | 0.69802 | 0 | 0 | 474 | 0.260726 | 0 | 0 | 0 | 0 | 709 | 0.389989 |
2dffcb312aeb4d575f278141844b269942663fab | 3,338 | py | Python | exercise/venv/lib/python3.7/site-packages/sqreen/sdk/events.py | assuzzanne/my-sqreen | 81ae0eab417a1dbc0ae6b1778ebfdd71591c3c5b | [
"MIT"
] | null | null | null | exercise/venv/lib/python3.7/site-packages/sqreen/sdk/events.py | assuzzanne/my-sqreen | 81ae0eab417a1dbc0ae6b1778ebfdd71591c3c5b | [
"MIT"
] | 1 | 2021-06-02T00:27:34.000Z | 2021-06-02T00:27:34.000Z | exercise/venv/lib/python3.7/site-packages/sqreen/sdk/events.py | assuzzanne/notifications-dispatcher-api | 81ae0eab417a1dbc0ae6b1778ebfdd71591c3c5b | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (c) 2016, 2017, 2018, 2019 Sqreen. All rights reserved.
# Please refer to our terms for more information:
#
# https://www.sqreen.io/terms.html
#
import logging
import traceback
from datetime import datetime
from ..runtime_storage import runtime
from ..utils import is_string
LOGGER = logging.getLogger(__name__)
_SQREEN_EVENT_PREFIX = "sq."
_TRACK_OPTIONS_FIELDS = frozenset(
["properties", "user_identifiers", "timestamp"]
)
_TRACK_PAYLOAD_SECTIONS = ("request", "response", "params", "headers")
STACKTRACE_EVENTS = set()
_MAX_EVENT_PROPERTIES = 16
def _track_unsafe(event, options, storage=runtime):
"""Track an SDK event.
This function is used internally in the agent to send built-in SDK events,
e.g. output of security actions. It does not perform any check and is not
exposed to the user.
"""
if "timestamp" not in options:
options = dict(options)
options["timestamp"] = datetime.utcnow()
storage.observe(
"sdk",
["track", options["timestamp"], event, options],
payload_sections=_TRACK_PAYLOAD_SECTIONS,
report=True,
)
return True
def track(event, options=None, storage=runtime):
"""Track an SDK event."""
# Check event type.
if not is_string(event):
raise TypeError(
"event name must be a string, not {}".format(
event.__class__.__name__
)
)
# Check event name.
if event.startswith(_SQREEN_EVENT_PREFIX):
LOGGER.warning(
"Event names starting with %r are reserved, "
"event %r has been ignored",
_SQREEN_EVENT_PREFIX,
event,
)
return False
if options is None:
options = {}
else:
options = dict(options)
# Check option keys.
for option_key in list(options):
if option_key not in _TRACK_OPTIONS_FIELDS:
LOGGER.warning("Invalid option key %r, skipped", option_key)
del options[option_key]
timestamp = options.get("timestamp")
if timestamp and not isinstance(timestamp, datetime):
raise TypeError(
"timestamp option must be a datetime object, not {}".format(
event.__class__.__name__
)
)
properties = options.get("properties")
# Check the number of properties.
if properties and len(properties) > _MAX_EVENT_PROPERTIES:
LOGGER.warning(
"Event %r has %d properties, "
"only the first %d ones will be reported",
event,
len(properties),
_MAX_EVENT_PROPERTIES,
)
options["properties"] = dict(
sorted(properties.items())[:_MAX_EVENT_PROPERTIES]
)
# Store stacktrace if required.
if event in STACKTRACE_EVENTS:
LOGGER.debug("Stacktrace recorded by for event %s", event)
options["stacktrace"] = traceback.format_stack()
return _track_unsafe(event, options, storage=storage)
def track_action(action, output, storage=runtime):
"""Track an action output."""
if not action.send_response:
return
return _track_unsafe(
"sq.action.{}".format(action.name),
{"properties": {"output": output, "action_id": action.iden}},
storage=storage,
)
| 29.539823 | 78 | 0.629718 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,088 | 0.325944 |
930100236a4359a4387dd68192ff211e5a4519df | 149 | py | Python | roona/roona/doctype/roona_app_setting/test_roona_app_setting.py | mohsinalimat/roona | b24336d9d56eb443a883131afffd9091c9a66add | [
"MIT"
] | 1 | 2021-08-28T04:24:00.000Z | 2021-08-28T04:24:00.000Z | roona/roona/doctype/roona_app_setting/test_roona_app_setting.py | mohsinalimat/roona | b24336d9d56eb443a883131afffd9091c9a66add | [
"MIT"
] | null | null | null | roona/roona/doctype/roona_app_setting/test_roona_app_setting.py | mohsinalimat/roona | b24336d9d56eb443a883131afffd9091c9a66add | [
"MIT"
] | null | null | null | # Copyright (c) 2021, Roona and Contributors
# See license.txt
# import frappe
import unittest
class TestRoonaAppSetting(unittest.TestCase):
pass
| 16.555556 | 45 | 0.785235 | 51 | 0.342282 | 0 | 0 | 0 | 0 | 0 | 0 | 76 | 0.510067 |
9302285f60696162f64b949eeb7c5d5dad50cb49 | 37,505 | py | Python | workspace/module/maya-python-2.7/LxMaya/command/maShdr.py | no7hings/Lynxi | 43c745198a714c2e5aca86c6d7a014adeeb9abf7 | [
"MIT"
] | 2 | 2018-03-06T03:33:55.000Z | 2019-03-26T03:25:11.000Z | workspace/module/maya-python-2.7/LxMaya/command/maShdr.py | no7hings/lynxi | 43c745198a714c2e5aca86c6d7a014adeeb9abf7 | [
"MIT"
] | null | null | null | workspace/module/maya-python-2.7/LxMaya/command/maShdr.py | no7hings/lynxi | 43c745198a714c2e5aca86c6d7a014adeeb9abf7 | [
"MIT"
] | null | null | null | # coding=utf-8
# noinspection PyUnresolvedReferences
import maya.cmds as cmds
from LxBasic import bscMtdCore, bscObjects, bscMethods
#
from LxPreset import prsConfigure, prsOutputs
#
from LxCore.config import appCfg
#
from LxCore.preset.prod import assetPr
#
from LxDatabase import dtbMtdCore
#
from LxDatabase.data import datHash
#
from LxMaya.command import maUtils, maAttr, maUuid, maTxtr
#
none = ''
#
DEF_mya_default_shading_engine_list = [
'initialShadingGroup',
'initialParticleSE',
'defaultLightSet',
'defaultObjectSet'
]
#
def materialNodeTypeConfig():
    # Map every Maya shading node type to its major classification
    # ('texture', 'shader' or 'utility'), preserving discovery order.
    typeDic = bscMtdCore.orderedDict()
    for classification in ['texture', 'shader', 'utility']:
        for typeName in cmds.listNodeTypes(classification):
            typeDic[typeName] = classification
    return typeDic
#
def _getNodeShadingEngineNodeStringList(nodepathString):
    # Collect the non-default shading engines driven by the node's shape
    # (falls back to the node itself when no shape exists).
    shadingEngines = []
    shapeNodepath = maUtils._dcc_getNodShapeNodepathStr(nodepathString, 1) or nodepathString
    connectedNodes = maUtils._getNodeTargetNodeStringList(shapeNodepath, appCfg.DEF_mya_type_shading_engine)
    if connectedNodes:
        for connectedNode in connectedNodes:
            if connectedNode not in DEF_mya_default_shading_engine_list:
                shadingEngines.append(connectedNode)
    return shadingEngines
#
def getObjectsShadingEngineLis(objectLis):
    # Union of shading engines across all given objects, duplicates removed,
    # keeping first-seen order.
    shadingEngines = []
    for nodepathString in objectLis or []:
        for shadingEngine in _getNodeShadingEngineNodeStringList(nodepathString):
            if shadingEngine not in shadingEngines:
                shadingEngines.append(shadingEngine)
    return shadingEngines
#
def getObjectMaterials(nodepathString):
    # List [ <materialInfo node> ] — gathered from each shading engine of the
    # object, in discovery order and without duplicates.
    materials = []
    for shadingEngine in _getNodeShadingEngineNodeStringList(nodepathString):
        inputNodes = maUtils._getNodeTargetNodeStringList(shadingEngine, 'materialInfo') or []
        for inputNode in inputNodes:
            if inputNode not in materials:
                materials.append(inputNode)
    return materials
#
def getObjectsMaterials(objectLis):
    # List [ <materialInfo node> ] over every object, deduplicated while
    # keeping first-seen order.
    materials = []
    for nodepathString in objectLis or []:
        for material in getObjectMaterials(nodepathString):
            if material not in materials:
                materials.append(material)
    return materials
# Get Nde_ShaderRef Nodes
def getConnectionNodes(material):
# Sub Method
def getBranch(node):
inputNodes = maUtils._getNodeSourceNodeStringList(node)
if inputNodes:
for node in inputNodes:
if node:
if not node in nodes:
nodes.append(node)
getBranch(node)
# List [ < File Nde_Node > ]
nodes = [material]
# Loop
getBranch(material)
#
return nodes
#
def getMaterialNodes(material):
    # Keep only true shading-network nodes: drop geometry/fur shapes and a
    # couple of bookkeeping node types from the connection graph.
    skipShapeTypes = ['mesh', 'nurbsSurface', 'nurbsCurve', 'pgYetiMaya', 'nurbsHair']
    skipNodeTypes = ['groupId', 'colorManagementGlobals']
    #
    materialNodes = []
    for node in getConnectionNodes(material):
        shapeType = maUtils._getNodeShapeCategoryString(node)
        nodeType = maUtils._getNodeCategoryString(node)
        if shapeType not in skipShapeTypes and nodeType not in skipNodeTypes:
            materialNodes.append(node)
    return materialNodes
#
def getTextureNodeLisByObject(objectLis):
    # Gather every texture-type node reachable from the objects' shading
    # engines, deduplicated in first-seen order.
    textureNodes = []
    for shadingEngine in getObjectsShadingEngineLis(objectLis):
        for connectionNode in getConnectionNodes(shadingEngine) or []:
            if maUtils._getNodeCategoryString(connectionNode) in appCfg.MaTexture_NodeTypeLis:
                if connectionNode not in textureNodes:
                    textureNodes.append(connectionNode)
    return textureNodes
#
def getObjectsMaterialNodesRenameDic(objectLis, assetName, assetVariant, assetStage):
    """Build an ordered { node: newName } rename map for the material
    network nodes of every object in *objectLis*.

    New names follow the pattern
    ``<assetPrefix>_<assetName>_<assetVariant>_<hierarchyName>_<nodeType>_<seq>``
    where *seq* is a per-object running index across all of the object's
    material nodes, and *hierarchyName* comes from the object's hierarchy
    attribute (falling back to ``<assetStage>_<objectType>_<objectIndex>``).
    A progress window is shown while collecting.
    """
    dic = bscMtdCore.orderedDict()
    if objectLis:
        explain = u'''Get Object's Material Rename Data'''
        maxValue = len(objectLis)
        progressBar = bscObjects.ProgressWindow(explain, maxValue)
        for objSeq, nodepathString in enumerate(objectLis):
            progressBar.update()
            objectType = maUtils._getNodeShapeCategoryString(nodepathString)
            materials = getObjectMaterials(nodepathString)
            # ``index`` numbers nodes continuously across ALL of this
            # object's materials (it is not reset per material).
            index = 0
            if materials:
                for matlSeq, material in enumerate(materials):
                    nodes = getMaterialNodes(material)
                    if nodes:
                        for nodSeq, node in enumerate(nodes):
                            seq = str(index)
                            # Prefer the hierarchy name stored on the object.
                            hierarchyName = maUtils.getAttrDatum(nodepathString, prsOutputs.Util.basicHierarchyAttrLabel)
                            if hierarchyName is None:
                                hierarchyName = assetStage + '_' + objectType + '_' + str(objSeq)
                            nodeType = maUtils._getNodeCategoryString(node)
                            #
                            nodeName = '{0}_{1}_{2}_{3}_{4}_{5}'.format(
                                prsOutputs.Util.Lynxi_Prefix_Product_Asset, assetName, assetVariant,
                                hierarchyName,
                                nodeType, seq
                            )
                            dic[node] = nodeName
                            #
                            index += 1
    return dic
#
def setObjectsMaterialNodesRename(objectLis, assetName, assetVariant, assetStage):
    """Rename the objects' material network nodes to the asset naming
    convention.

    Skips shape/AOV node types, Maya default nodes and the asset's model
    bridge group, and only renames nodes whose current name differs from
    the target name. Shows a progress window while renaming.
    """
    exceptObjectTypes = ['mesh', 'pgYetiMaya', 'nurbsHair', 'aiAOV']
    exceptNodeLis = ['time1', 'lambert1', 'defaultColorMgtGlobals', 'layerManager', 'renderLayerManager', assetPr.astUnitModelBridgeGroupName(assetName)]
    #
    renameDataArray = []
    renameDic = getObjectsMaterialNodesRenameDic(objectLis, assetName, assetVariant, assetStage)
    if renameDic:
        for node, nodeName in renameDic.items():
            objectType = maUtils._getNodeShapeCategoryString(node)
            if objectType not in exceptObjectTypes:
                if node not in exceptNodeLis and node != nodeName:
                    renameDataArray.append((node, nodeName))
    #
    if renameDataArray:
        # View Progress
        explain = u'''Rename Material - Nde_Node'''
        maxValue = len(renameDataArray)
        progressBar = bscObjects.ProgressWindow(explain, maxValue)
        for node, nodeName in renameDataArray:
            progressBar.update(nodeName)
            # A leftover debug ``print`` statement was removed here.
            maUtils.setNodeRename(node, nodeName)
#
def getAovNodesRenameDic(aovNodes, assetName, assetVariant):
    # Build an ordered { node: newName } map for every node reachable from
    # each AOV; the sequence suffix encodes (aov index, node index), both
    # 1-based and zero-padded to three digits.
    renameDic = bscMtdCore.orderedDict()
    if aovNodes:
        explain = u'''Get AOV's Rename Data'''
        progressBar = bscObjects.ProgressWindow(explain, len(aovNodes))
        for aovIndex, aovNode in enumerate(aovNodes):
            progressBar.update()
            for nodeIndex, node in enumerate(getMaterialNodes(aovNode) or []):
                sequence = '{0}{1}'.format(
                    str(aovIndex + 1).zfill(3), str(nodeIndex + 1).zfill(3)
                )
                nodeType = maUtils._getNodeCategoryString(node)
                renameDic[node] = '{0}_{1}_{2}_{3}'.format(
                    assetName, assetVariant,
                    nodeType, sequence
                )
    return renameDic
#
def setRenameAovNodes(aovNodes, assetName, assetVariant):
    # Apply the AOV rename map, skipping shape/AOV node types and Maya's
    # built-in default nodes; shows a progress window while renaming.
    skipObjectTypes = ['mesh', 'pgYetiMaya', 'nurbsHair', 'aiAOV', 'aiAOVDriver', 'aiAOVFilter']
    skipNodes = [
        'time1',
        'lambert1',
        'defaultColorMgtGlobals',
        'layerManager',
        'defaultArnoldDriver', 'defaultArnoldFilter'
    ]
    #
    renamePairs = []
    renameDic = getAovNodesRenameDic(aovNodes, assetName, assetVariant)
    for node, nodeName in renameDic.items():
        if maUtils._getNodeShapeCategoryString(node) in skipObjectTypes:
            continue
        if node in skipNodes or node == nodeName:
            continue
        renamePairs.append((node, nodeName))
    #
    if renamePairs:
        explain = u'''Rename AOV Nde_Node'''
        progressBar = bscObjects.ProgressWindow(explain, len(renamePairs))
        for node, nodeName in renamePairs:
            progressBar.update(nodeName)
            maUtils.setNodeRename(node, nodeName)
# Nde_Node Data
def getMaterialNodeData(material):
nodesDataArray = []
nodes = getMaterialNodes(material)
if nodes:
for node in nodes:
# Filter Unused Nde_Node Type
nodeType = maUtils._getNodeCategoryString(node)
definedAttrData = maAttr.getNodeDefAttrDatumLis(node)
customAttrData = maAttr.getNodeUserDefAttrData(node)
nodesDataArray.append((node, nodeType, definedAttrData, customAttrData))
return nodesDataArray
#
def getMaterialComponentData(material):
    # The "component" signature of a material is simply the ordered list of
    # node types in its shading network.
    nodes = getMaterialNodes(material) or []
    return [maUtils._getNodeCategoryString(node) for node in nodes]
#
def getMaterialAttributeData(material):
    # Per network node: (nodeType, reduced defined attrs, reduced custom
    # attrs); texture paths inside the attrs are collapsed to file names.
    attributeDataArray = []
    for node in getMaterialNodes(material) or []:
        nodeType = maUtils._getNodeCategoryString(node)
        definedAttrData = getNodeAttrDataReduce(maAttr.getNodeDefAttrDatumLis(node))
        customAttrData = getNodeAttrDataReduce(maAttr.getNodeUserDefAttrData(node))
        attributeDataArray.append((nodeType, definedAttrData, customAttrData))
    return attributeDataArray
#
def getNodeAttrDataReduce(attrDatas):
    """Reduce raw attribute data to ``(attrName, value)`` pairs.

    For attributes known to hold texture paths, the value is replaced by
    the file's base name when the texture exists on disk, or by an empty
    string when it does not; all other attributes keep their raw value.

    :param attrDatas: iterable of (attrName, value, attrType, lock) tuples.
    :return: list of (attrName, value) tuples.
    """
    attrDataArray = []
    textureAttrNames = maTxtr.MaTexture_AttrNameLis
    if attrDatas:
        # Distinct names per field (the original shadowed the tuple with
        # ``data``); attrType and lock are intentionally ignored.
        for attrName, value, _attrType, _lock in attrDatas:
            if attrName in textureAttrNames:
                # Collapse texture paths to a stable, location-free token.
                if maTxtr.isOsTextureExist(value):
                    value = bscMethods.OsFile.basename(value)
                else:
                    value = none
            attrDataArray.append((attrName, value))
    return attrDataArray
# Nde_Node Data
def getMaterialsNodeData(materials):
dic = bscMtdCore.orderedDict()
if materials:
for material in materials:
uniqueId = maUuid._getNodeUniqueIdString(material)
shaderNodeData = getMaterialNodeData(material)
if shaderNodeData:
dic[uniqueId] = shaderNodeData
return dic
#
def getMaterialRelationData(material):
    # Connection pairs inside the material network, excluding target
    # attributes that only express set/group membership.
    skipTargetAttrNames = [
        '.groupNodes',
        '.dagSetMembers'
    ]
    #
    connectionArray = []
    for node in getConnectionNodes(material) or []:
        for sourceAttr, targetAttr in maAttr.getNodeConnectionsDataArray(node):
            if any(skipName in targetAttr for skipName in skipTargetAttrNames):
                continue
            connectionArray.append((sourceAttr, targetAttr))
    return connectionArray
# Nde_Node Data
def getMaterialsRelationData(materials):
dic = bscMtdCore.orderedDict()
if materials:
for material in materials:
uniqueId = maUuid._getNodeUniqueIdString(material)
nodeConnectionData = getMaterialRelationData(material)
if nodeConnectionData:
dic[uniqueId] = nodeConnectionData
return dic
#
def getMaterialComponentInfo(material):
    # Hash key summarising which node types compose the material network.
    return datHash.getStrHashKey(getMaterialComponentData(material))
#
def getMaterialAttributeInfo(material):
    # Hash key summarising the network's reduced attribute values.
    return datHash.getStrHashKey(getMaterialAttributeData(material))
#
def getMaterialRelationInfo(material):
    # Hash key summarising the network's connections (message plugs excluded
    # so bookkeeping links do not affect the hash).
    reducedConnections = getNodeConnectionDataReduce(getMaterialRelationData(material))
    return datHash.getStrHashKey(reducedConnections)
#
def getNodeConnectionDataReduce(connections):
    # Drop '.message' connections — they carry dependency bookkeeping only
    # and would make the relation hash unstable.
    if not connections:
        return []
    return [
        (sourceAttr, targetAttr)
        for sourceAttr, targetAttr in connections
        if not sourceAttr.endswith('.message')
    ]
#
def getMaterialsInformationData(materials):
    # { uniqueId: (componentHash, attributeHash, relationHash) } per material.
    informationDic = bscMtdCore.orderedDict()
    for material in materials or []:
        uniqueId = maUuid._getNodeUniqueIdString(material)
        informationDic[uniqueId] = (
            getMaterialComponentInfo(material),
            getMaterialAttributeInfo(material),
            getMaterialRelationInfo(material)
        )
    return informationDic
#
def setCreateCompMaterialsNodes(materialsNodeData):
    """Create every node recorded for each material and restore its unique id.

    ``materialsNodeData`` maps uniqueId -> [keyNodeData, subNodeData, ...];
    the first entry describes the material node itself.
    """
    if materialsNodeData:
        for uniqueId, nodeDataArray in materialsNodeData.items():
            if nodeDataArray:
                keyNodeData = nodeDataArray[0]
                setCreateMaterialNode(keyNodeData)
                material = keyNodeData[0]
                maUuid.setMayaUniqueId(material, uniqueId)
                # Fix: start at index 1 — the key node (index 0) was already
                # created above; the previous ``[0:]`` slice processed it twice.
                for subNodeData in nodeDataArray[1:]:
                    setCreateMaterialNode(subNodeData)
#
def setCreateCompMaterialsUniqueId(materialsNodeData):
    """Create only the key node of each material and re-apply its unique id."""
    for uniqueId, nodeDataArray in (materialsNodeData or {}).items():
        if not nodeDataArray:
            continue
        keyNodeData = nodeDataArray[0]
        setCreateMaterialNode(keyNodeData)
        maUuid.setMayaUniqueId(keyNodeData[0], uniqueId)
#
def setCreateCompAovsNodes(materialsNodeData):
    """Create every node recorded for each AOV, restore its unique id and relink it."""
    if materialsNodeData:
        for uniqueId, nodeDataArray in materialsNodeData.items():
            if nodeDataArray:
                keyNodeData = nodeDataArray[0]
                setCreateMaterialNode(keyNodeData)
                aovNode = keyNodeData[0]
                maUuid.setMayaUniqueId(aovNode, uniqueId)
                setCreateAovNodeLink(aovNode)
                # Fix: start at index 1 — the AOV key node (index 0) was already
                # created above; the previous ``[0:]`` slice processed it twice.
                for subNodeData in nodeDataArray[1:]:
                    setCreateMaterialNode(subNodeData)
#
def setCreateMaterialNode(materialNodeData):
    """Create one node from its serialized tuple and apply all of its attributes."""
    node, nodeType, definedAttrData, customAttrData = materialNodeData
    setCreateNode(node, nodeType, definedAttrData)
    # User-defined attributes are restored unlocked so later edits stay possible.
    maAttr.setObjectUserDefinedAttrs(node, customAttrData, lockAttribute=False)
#
def setCreateNode(node, nodeType, definedAttrData):
    """Create ``node`` of ``nodeType`` if missing, then apply its defined attributes.

    Shading node types (from materialNodeTypeConfig) are created through
    ``cmds.shadingNode`` so Maya classifies them correctly; anything else goes
    through plain ``cmds.createNode``.
    """
    shaderNodeTypeDic = materialNodeTypeConfig()
    if not cmds.objExists(node):
        if nodeType in shaderNodeTypeDic:
            majorType = shaderNodeTypeDic[nodeType]
            if majorType == 'texture':
                cmds.shadingNode(nodeType, name=node, asTexture=1)
            elif majorType == 'shader':
                cmds.shadingNode(nodeType, name=node, asShader=1)
            elif majorType == 'utility':
                cmds.shadingNode(nodeType, name=node, asUtility=1)
        else:
            cmds.createNode(nodeType, name=node)
    # Attributes are (re)applied whether or not the node pre-existed.
    maAttr.setNodeDefAttrByData(node, definedAttrData, lockAttribute=False)
#
def setCreateMaterialsConnections(connectionData):
    """Recreate all stored attribute connections, material by material."""
    if connectionData:
        # Only the connection arrays matter here; the unique-id keys are unused.
        for connectionArray in connectionData.values():
            maAttr.setCreateConnections(connectionArray)
#
def getMaterialEvaluateData(objectLis):
    """Count the unique materials, shading nodes and connections used by ``objectLis``.

    Returns an ordered dict with keys ``material``, ``node`` and ``connection``.
    Shape-producing node types (mesh, yeti, hair) and the default utility nodes
    (time1, lambert1, colour management) are excluded from the node count.

    Sets replace the previous ``not in list`` membership tests — only the
    final lengths are reported, so deduplication order does not matter and
    each check drops from O(n) to O(1).
    """
    exceptObjectTypes = {
        'mesh',
        'pgYetiMaya',
        'nurbsHair',
    }
    exceptNodes = {
        'time1',
        'lambert1',
        'defaultColorMgtGlobals',
    }
    dic = bscMtdCore.orderedDict()
    totalMaterials = set()
    totalNodes = set()
    totalConnections = set()
    if objectLis:
        for nodepathString in objectLis:
            shadingEngineLis = _getNodeShadingEngineNodeStringList(nodepathString)
            if shadingEngineLis:
                for shadingEngine in shadingEngineLis:
                    totalMaterials.add(shadingEngine)
                    # Nde_Node
                    nodes = getMaterialNodes(shadingEngine)
                    if nodes:
                        for node in nodes:
                            objectType = maUtils._getNodeShapeCategoryString(node)
                            if objectType not in exceptObjectTypes and node not in exceptNodes:
                                totalNodes.add(node)
                    # Connection (tuples — hashable)
                    connectionArray = getMaterialRelationData(shadingEngine)
                    if connectionArray:
                        totalConnections.update(connectionArray)
    dic['material'] = len(totalMaterials)
    dic['node'] = len(totalNodes)
    dic['connection'] = len(totalConnections)
    return dic
#
def getObjectsMaterialRelinkData(objectLis):
    """Build, per object path, the (componentSuffix, shadingEngine) link pairs.

    Whole-object assignments store ``none`` (module-level alias used throughout
    this file — presumably ``None``; TODO confirm) in the component slot;
    per-component assignments store the ``.<component>`` suffix taken from the
    shading-engine set member path.
    """
    shaderObjectTypes = ['mesh', 'pgYetiMaya', 'nurbsHair']
    dic = bscMtdCore.orderedDict()
    for nodepathString in objectLis:
        linkDatumLis = []
        shape = maUtils._dcc_getNodShapeNodepathStr(nodepathString, fullPath=1)
        shadingEngineLis = maUtils._getNodeTargetNodeStringList(shape, appCfg.DEF_mya_type_shading_engine)
        if shadingEngineLis:
            for shadingEngine in shadingEngineLis:
                # Members (objects and components) assigned to this shading engine.
                elementSetData = cmds.sets(shadingEngine, query=1)
                if elementSetData:
                    elementSetFullPathData = [i for i in cmds.ls(elementSetData, leaf=1, noIntermediate=1, long=1)]
                    for data in elementSetFullPathData:
                        # Object Group
                        if data.startswith(nodepathString):
                            # cmds.ls(showType=1) returns [path, type]; index 1 is the node type.
                            showType = cmds.ls(data, showType=1)[1]
                            if showType in shaderObjectTypes:
                                linkData = none, shadingEngine
                                if not linkData in linkDatumLis:
                                    linkDatumLis.append(linkData)
                            # Component Object Group
                            # 'float3' is the type reported for per-component set members.
                            if showType == 'float3':
                                componentObjectIndex = data.split('.')[-1]
                                linkData = '.' + componentObjectIndex, shadingEngine
                                if not linkData in linkDatumLis:
                                    linkDatumLis.append(linkData)
        dic[nodepathString] = linkDatumLis
    return dic
#
def getMaterialShadingEngine(uniqueId):
    """Resolve a material unique id to the first shading engine driving it, if any."""
    material = maUuid.getObject(uniqueId)
    if not material:
        return None
    engines = maUtils._getNodeSourceNodeStringList(material, appCfg.DEF_mya_type_shading_engine)
    return engines[0] if engines else None
#
def getShadingEngineMaterialUniqueId(shadingEngine):
    """Return the unique id of the material behind ``shadingEngine`` (via materialInfo)."""
    materials = maUtils._getNodeTargetNodeStringList(shadingEngine, 'materialInfo')
    if materials:
        return maUuid._getNodeUniqueIdString(materials[0])
#
def getShaderObjectsObjSetDic(objectLis):
    """Map each object's unique id to its (component, material-id) link pairs."""
    dic = bscMtdCore.orderedDict()
    for nodepathString in objectLis:
        uniqueId = maUuid._getNodeUniqueIdString(nodepathString)
        dic[uniqueId] = getShaderObjectObjSetSub(nodepathString)
    return dic
#
def getShaderObjectObjSetSub(nodepathString):
    """Collect (componentSuffix, materialUniqueId) pairs for one object.

    Whole-object assignments use ``none`` (module-level alias — presumably
    ``None``; TODO confirm) as the component slot; per-component assignments
    use the ``.<component>`` suffix of the set member path.
    """
    shaderObjectTypes = ['mesh', 'pgYetiMaya', 'nurbsHair']
    #
    lis = []
    #
    shadingEngineLis = _getNodeShadingEngineNodeStringList(nodepathString)
    if shadingEngineLis:
        for shadingEngine in shadingEngineLis:
            compMaterialIndex = getShadingEngineMaterialUniqueId(shadingEngine)
            # Members (objects and components) assigned to this shading engine.
            elementSetData = cmds.sets(shadingEngine, query=1)
            if elementSetData:
                elementSetFullPathData = [i for i in cmds.ls(elementSetData, leaf=1, noIntermediate=1, long=1)]
                for data in elementSetFullPathData:
                    # Object Group
                    if data.startswith(nodepathString):
                        # cmds.ls(showType=1) returns [path, type]; index 1 is the node type.
                        showType = cmds.ls(data, showType=1)[1]
                        if showType in shaderObjectTypes:
                            linkData = none, compMaterialIndex
                            if not linkData in lis:
                                lis.append(linkData)
                        # Component Object Group
                        # 'float3' is the type reported for per-component set members.
                        if showType == 'float3':
                            componentObjectIndex = data.split('.')[-1]
                            linkData = '.' + componentObjectIndex, compMaterialIndex
                            if not linkData in lis:
                                lis.append(linkData)
    return lis
# Link Material
def setLinkObjectsMaterial(data, objectNamespace=none, materialNamespace=none):
    """Relink stored material assignments onto scene objects.

    Args:
        data: mapping of object path -> [(componentSuffix, shadingEngine), ...];
            a single pair is a whole-object assignment, several pairs are
            per-component assignments.
        objectNamespace / materialNamespace: optional namespaces joined onto the
            object paths / shading engine names before linking.
    """
    if data:
        # View Progress
        explain = u'''Link / Relink Material'''
        maxValue = len(data)
        progressBar = bscObjects.ProgressWindow(explain, maxValue)
        for nodepathString, linkDatumLis in data.items():
            # In Progress
            progressBar.update()
            # NOTE(review): the ``[a, b][condition]`` idiom evaluates *both*
            # branches eagerly, so the namespace-joined path is computed even
            # when no namespace is given — presumably harmless; confirm.
            usedObject = \
                [nodepathString, maUtils.getObjectPathJoinNamespace(nodepathString, objectNamespace)][objectNamespace != none]
            #
            if linkDatumLis:
                # Clear >>> 01
                setObjectCleanTransformShadingEngine(usedObject)
                setObjectCleanShapeShadingEngine(usedObject)
                # Link >>> 02
                isComponentLink = len(linkDatumLis) > 1
                #
                if not isComponentLink:
                    componentObjectIndex, shadingEngine = linkDatumLis[0]
                    usedShadingEngine = [shadingEngine, maUtils.getNodeJoinNamespace(shadingEngine, materialNamespace)][materialNamespace != none]
                    setObjectAssignMaterial(usedObject, none, usedShadingEngine)
                #
                if isComponentLink:
                    for componentObjectIndex, shadingEngine in linkDatumLis:
                        usedShadingEngine = [shadingEngine, maUtils.getNodeJoinNamespace(shadingEngine, materialNamespace)][materialNamespace != none]
                        setObjectAssignMaterial(usedObject, componentObjectIndex, usedShadingEngine)
# Link Material
def setMaterialsObjectSetsConnect(datumDic):
    """Re-link every object's stored material assignments, showing a progress window."""
    if not datumDic:
        return
    # View Progress
    explain = u'''Connect Material's Object Set(s)'''
    progressBar = bscObjects.ProgressWindow(explain, len(datumDic))
    for compIndex, linkDatumLis in datumDic.items():
        progressBar.update()
        setMaterialObjectSetConnect(compIndex, linkDatumLis)
#
def setMaterialObjectSetConnect(compIndex, linkDatumLis):
    """Assign the stored material links of one object (resolved by unique id).

    ``linkDatumLis`` holds (componentSuffix, materialUniqueId) pairs; a single
    pair means a whole-object assignment, several mean per-component ones.
    """
    nodepathString = maUuid.getObject(compIndex, fullPath=1)
    if not nodepathString:
        return
    # Fix: an empty link list previously skipped the clear step but still fell
    # through to ``linkDatumLis[0]``, raising IndexError. Nothing to do here.
    if not linkDatumLis:
        return
    # Clear >>> 01
    setObjectCleanTransformShadingEngine(nodepathString)
    setObjectCleanShapeShadingEngine(nodepathString)
    # Link >>> 02
    if len(linkDatumLis) == 1:
        componentObjectIndex, compMaterialIndex = linkDatumLis[0]
        shadingEngine = getMaterialShadingEngine(compMaterialIndex)
        setObjectAssignMaterial(nodepathString, None, shadingEngine)
    else:
        for componentObjectIndex, compMaterialIndex in linkDatumLis:
            shadingEngine = getMaterialShadingEngine(compMaterialIndex)
            setObjectAssignMaterial(nodepathString, componentObjectIndex, shadingEngine)
#
def setObjectCleanShadingEngine(nodepathString):
    """Detach the object from all shading engines: transform-level links first, then shape-level set memberships."""
    setObjectCleanTransformShadingEngine(nodepathString)
    setObjectCleanShapeShadingEngine(nodepathString)
#
def setObjectCleanShapeShadingEngine(nodepathString):
    """Remove the object's shape from every shading engine set it belongs to."""
    shape = maUtils._dcc_getNodShapeNodepathStr(nodepathString)
    shapeShadingEngines = maUtils._getNodeTargetNodeStringList(shape, appCfg.DEF_mya_type_shading_engine)
    if shapeShadingEngines:
        # Plain loop: the list comprehension was used only for its side effects.
        for shadingEngine in shapeShadingEngines:
            cmds.sets(shape, remove=shadingEngine)
#
def setObjectCleanTransformShadingEngine(nodepathString):
    """Disconnect transform-level ``instObjGroups`` outputs (per-instance shading links)."""
    for sourceAttr, targetAttr in maUtils.getNodeOutputConnectionLis(nodepathString) or []:
        if sourceAttr.endswith('instObjGroups'):
            maUtils.setAttrDisconnect(sourceAttr, targetAttr)
#
def setObjectDefaultShadingEngine(nodepathString):
    """Assign initialShadingGroup to the object's shape when it has no shading engine."""
    shape = maUtils._dcc_getNodShapeNodepathStr(nodepathString)
    existingEngines = maUtils._getNodeTargetNodeStringList(shape, appCfg.DEF_mya_type_shading_engine)
    if not existingEngines:
        cmds.sets(shape, forceElement='initialShadingGroup')
#
def setObjectsDefaultShadingEngine(componentObjectIndexes):
    """Apply the default shading engine to every object resolved from the given ids."""
    for componentObjectIndex in componentObjectIndexes:
        setObjectDefaultShadingEngine(maUuid.getObject(componentObjectIndex))
#
def setObjectAssignMaterial(nodepathString, componentObjectIndex, shadingEngine):
    """Force-assign ``shadingEngine`` to a shape or a component of ``nodepathString``.

    Falls back to initialShadingGroup when the requested engine does not exist;
    does nothing when the link target itself does not exist.
    """
    if componentObjectIndex is None:
        linkObject = maUtils._dcc_getNodShapeNodepathStr(nodepathString, 1)
    else:
        linkObject = nodepathString + componentObjectIndex
    if not maUtils._isAppExist(linkObject):
        return
    if maUtils._isAppExist(shadingEngine):
        cmds.sets(linkObject, forceElement=shadingEngine)
        setCreateLightLink(shadingEngine)
    else:
        cmds.sets(linkObject, forceElement='initialShadingGroup')
#
def setCreateLightLink(shadingEngine):
    """Register ``shadingEngine`` with the render partition and default light links.

    Scans for the first slot index that is free in the render partition *and*
    in all four light-linker plug arrays, then wires the shading engine in
    (skipped when its ``.partition`` plug is already connected).
    """
    def getUsedConnectionIndex():
        # First index free in all five plug arrays; None when none found.
        for i in range(5000):
            if isUsedPartitionConnectionIndex(i) \
                    and isUsedObjectLinkConnectionIndex(i) \
                    and isUsedShadowObjectLinkConnectionIndex(i) \
                    and isUsedLightLinkConnectionIndex(i) \
                    and isUsedShadowLightLinkConnectionIndex(i):
                return i
    #
    def isUsedConnection(connection):
        # True when the plug exists and is not yet driven (i.e. the slot is free).
        boolean = False
        if cmds.objExists(connection):
            if not cmds.connectionInfo(connection, isDestination=1):
                boolean = True
        return boolean
    #
    def isUsedPartitionConnectionIndex(index):
        connection = appCfg.MaRenderPartition + '.sets[%s]' % index
        return isUsedConnection(connection)
    #
    def isUsedObjectLinkConnectionIndex(index):
        connection = appCfg.MaNodeName_LightLink + '.link[%s].object' % index
        return isUsedConnection(connection)
    #
    def isUsedShadowObjectLinkConnectionIndex(index):
        connection = appCfg.MaNodeName_LightLink + '.shadowLink[%s].shadowObject' % index
        return isUsedConnection(connection)
    #
    def isUsedLightLinkConnectionIndex(index):
        connection = appCfg.MaNodeName_LightLink + '.link[%s].light' % index
        return isUsedConnection(connection)
    #
    def isUsedShadowLightLinkConnectionIndex(index):
        connection = appCfg.MaNodeName_LightLink + '.shadowLink[%s].shadowLight' % index
        return isUsedConnection(connection)
    #
    def setMain():
        index = getUsedConnectionIndex()
        # Fix: slot 0 is a valid index — the previous truthiness test
        # (``if index:``) skipped it and conflated it with "no slot found".
        if index is not None:
            # Debug ( Repeat )
            if not cmds.connectionInfo(shadingEngine + '.partition', isSource=1):
                cmds.connectAttr(shadingEngine + '.partition', appCfg.MaRenderPartition + '.sets[%s]' % index)
                cmds.connectAttr(shadingEngine + '.message', appCfg.MaNodeName_LightLink + '.link[%s].object' % index)
                cmds.connectAttr(shadingEngine + '.message', appCfg.MaNodeName_LightLink + '.shadowLink[%s].shadowObject' % index)
                cmds.connectAttr(appCfg.MaNodeName_DefaultLightSet + '.message', appCfg.MaNodeName_LightLink + '.link[%s].light' % index)
                cmds.connectAttr(appCfg.MaNodeName_DefaultLightSet + '.message', appCfg.MaNodeName_LightLink + '.shadowLink[%s].shadowLight' % index)
    #
    setMain()
#
def getAovNodeLis(renderer):
    """Return the scene's AOV nodes for ``renderer`` (empty list when unsupported)."""
    if renderer == prsConfigure.Utility.DEF_value_renderer_arnold:
        return getArnoldAovNodeLis()
    if renderer == prsConfigure.Utility.DEF_value_renderer_redshift:
        return getRedshiftAovNodes()
    return []
# Get Arnold's Aov
# Get Arnold's Aov
def getArnoldAovNodeLis():
    """List Arnold AOV nodes feeding the render options (empty when mtoa is off)."""
    if not maUtils.isArnoldEnable():
        return []
    return maUtils._getNodeSourceNodeStringList('defaultArnoldRenderOptions', 'aiAOV')
# Get Redshift's Aov
# Get Redshift's Aov
def getRedshiftAovNodes():
    """List Redshift AOV nodes in the scene (empty when Redshift is off)."""
    if not maUtils.isRedshiftEnable():
        return []
    return maUtils.getNodeLisByType('RedshiftAOV')
#
def getAovNodesData(renderer):
    """Return {aovNode: aovName} for the given renderer name.

    NOTE(review): compares against the literal strings 'Arnold'/'Redshift',
    unlike getAovNodeLis() which uses prsConfigure constants — confirm they match.
    """
    if renderer == 'Arnold':
        return getArnoldAovNodesData()
    if renderer == 'Redshift':
        return getRedshiftAovNodesData()
    return bscMtdCore.orderedDict()
#
def getArnoldAovNodesData():
    """Map each Arnold AOV node to its 'name' attribute value."""
    dic = bscMtdCore.orderedDict()
    for aovNode in getArnoldAovNodeLis() or []:
        dic[aovNode] = maUtils.getAttrDatum(aovNode, 'name')
    return dic
#
def getRedshiftAovNodesData():
    """Map each Redshift AOV node to its 'name' attribute value."""
    dic = bscMtdCore.orderedDict()
    for aovNode in getRedshiftAovNodes() or []:
        dic[aovNode] = maUtils.getAttrDatum(aovNode, 'name')
    return dic
# noinspection PyUnresolvedReferences
def getArnoldOption():
    """Ensure Arnold's render option nodes exist (best effort, no-op on failure)."""
    if prsMethods.Project.isMayaUsedArnoldRenderer():
        try:
            import mtoa.core as core
            #
            core.createOptions()
        except Exception:
            # Fix: the previous bare ``except:`` also swallowed SystemExit and
            # KeyboardInterrupt; mtoa may be missing or partially loaded, so
            # ordinary failures are intentionally ignored.
            pass
#
def setCreateAovNodeLink(aovNode, maxDepth=50):
    """Connect ``aovNode.message`` to the first free ``aovList`` slot of the Arnold options.

    Does nothing when the node's message plug is missing, already driving
    something, or when no free slot exists within ``maxDepth`` entries.
    """
    def _firstFreeAovListAttr():
        for slot in range(0, maxDepth):
            candidate = '%s.aovList[%s]' % ('defaultArnoldRenderOptions', slot)
            if maUtils._isAppExist(candidate) and not maAttr.isAttrDestination(candidate):
                return candidate

    aovMessageAttr = aovNode + '.message'
    if not maUtils._isAppExist(aovMessageAttr):
        return
    if maAttr.isAttrSource(aovMessageAttr):
        return
    freeSlot = _firstFreeAovListAttr()
    if freeSlot:
        cmds.connectAttr(aovMessageAttr, freeSlot)
@dtbMtdCore.fncThreadSemaphoreModifier
def setRepairAovNodesLink():
    """Re-link every Arnold AOV node into the render options' aovList."""
    getArnoldOption()
    aovs = maUtils.getNodeLisByType('aiAOV')
    if aovs:
        # Plain loop instead of a side-effect-only list comprehension.
        for aov in aovs:
            setCreateAovNodeLink(aov)
#
def setRepairArnoldAov(aovNodes=None):
    """Re-point every AOV's output driver/filter at the default Arnold ones.

    Args:
        aovNodes: AOV nodes to repair; defaults to all ``aiAOV`` nodes in the scene.
    """
    if not aovNodes:
        aovNodes = cmds.ls(type='aiAOV')
    #
    defDriverAttr = 'defaultArnoldDriver.message'
    defFilterAttr = 'defaultArnoldFilter.message'
    if aovNodes:
        for aovNode in aovNodes:
            outputDriverAttr = aovNode + '.outputs[0].driver'
            outputFilterAttr = aovNode + '.outputs[0].filter'
            inputConnections = maUtils.getNodeInputConnectionLis(aovNode)
            # Rewire any non-default driver/filter connection to the defaults.
            for inputAttr, outputAttr in inputConnections:
                if outputAttr == outputDriverAttr:
                    if inputAttr != defDriverAttr:
                        maUtils.setAttrDisconnect(inputAttr, outputAttr)
                        maUtils.setAttrConnect(defDriverAttr, outputDriverAttr)
                #
                if outputAttr == outputFilterAttr:
                    if inputAttr != defFilterAttr:
                        maUtils.setAttrDisconnect(inputAttr, outputAttr)
                        maUtils.setAttrConnect(defFilterAttr, outputFilterAttr)
            # NOTE(review): this is a for-else — with no ``break`` in the loop
            # it runs after *every* complete iteration of the for (including an
            # empty connection list), not only when nothing matched. The
            # isConnected guards keep it idempotent, but confirm this control
            # flow is intentional.
            else:
                if not cmds.isConnected(defDriverAttr, outputDriverAttr):
                    cmds.connectAttr(defDriverAttr, outputDriverAttr)
                #
                if not cmds.isConnected(defFilterAttr, outputFilterAttr):
                    cmds.connectAttr(defFilterAttr, outputFilterAttr)
#
def getObjectsAttrData(objectLis):
    """Map each object's unique id to its (render, plug, user-defined) attr data."""
    dic = bscMtdCore.orderedDict()
    for nodepathString in objectLis or []:
        objectShape = maUtils._dcc_getNodShapeNodepathStr(nodepathString)
        uniqueId = maUuid._getNodeUniqueIdString(nodepathString)
        dic[uniqueId] = (
            maAttr.getNodeRenderAttrData(objectShape),
            maAttr.getNodePlugAttrData(objectShape),
            maAttr.getNodeUserDefAttrData(objectShape),
        )
    return dic
#
def setObjectsAttrsCreate(datumDic):
    """Restore stored attribute data on every object, showing a progress window."""
    if not datumDic:
        return
    # View Progress
    explain = u'''Set Material's Object Attribute(s)'''
    progressBar = bscObjects.ProgressWindow(explain, len(datumDic))
    for uniqueId, attrData in datumDic.items():
        progressBar.update()
        nodepathString = maUuid.getObject(uniqueId)
        if nodepathString:
            setObjectAttrsCreate(nodepathString, attrData)
#
def setObjectAttrsCreate(nodepathString, attrData):
    """Apply (render, plug, user-defined) attribute data to the object's shape."""
    if not attrData:
        return
    objectShape = maUtils._dcc_getNodShapeNodepathStr(nodepathString, 1)
    renderAttrData, plugAttrData, customAttrData = attrData
    if renderAttrData:
        maAttr.setNodeDefAttrByData(objectShape, renderAttrData)
    if plugAttrData:
        maAttr.setNodeDefAttrByData(objectShape, plugAttrData)
    if customAttrData:
        maAttr.setObjectUserDefinedAttrs(objectShape, customAttrData)
#
def setRefreshTextureColorSpace(textureNodes):
    """Force non-sRGB texture nodes to ignore colour-space file rules."""
    for textureNode in textureNodes or []:
        colorSpace = maUtils.getAttrDatum(textureNode, 'colorSpace')
        if colorSpace != 'sRGB':
            maUtils.setAttrDatumForce_(textureNode, 'ignoreColorSpaceFileRules', 1)
#
def setArnoldShaderCovert(nodepathString, texturePath):
    """Convert an object's Arnold surface shader into a baked viewport proxy.

    For each shading engine driving the object whose surface shader is an
    aiStandardSurface: move that shader to ``aiSurfaceShader``, create a blinn
    proxy on ``surfaceShader``, and bake the base colour input to a jpg in
    ``texturePath`` which is wired into the proxy's colour.

    NOTE(review): 'Covert' in the name looks like a typo for 'Convert', but the
    public name is kept for callers.
    """
    nodeTypeLis = [
        'aiStandardSurface'
    ]
    shadingEngineLis = _getNodeShadingEngineNodeStringList(nodepathString)
    if shadingEngineLis:
        for shadingEngine in shadingEngineLis:
            targetAttr0 = maUtils._toNodeAttr([shadingEngine, 'surfaceShader'])
            stringLis = maUtils.getInputNodeLisByAttr(targetAttr0)
            if stringLis:
                nodeName = stringLis[0]
                nodeType = maUtils._getNodeCategoryString(nodeName)
                if nodeType in nodeTypeLis:
                    sourceAttr0 = maUtils._toNodeAttr([nodeName, 'outColor'])
                    targetAttr1 = maUtils._toNodeAttr([shadingEngine, 'aiSurfaceShader'])
                    # Move the Arnold shader from surfaceShader to aiSurfaceShader.
                    cmds.disconnectAttr(sourceAttr0, targetAttr0)
                    #
                    if not cmds.isConnected(sourceAttr0, targetAttr1):
                        cmds.connectAttr(sourceAttr0, targetAttr1)
                    # A blinn node stands in as the viewport colour shader.
                    colorShaderNodeName = shadingEngine + 'cls_colorShader'
                    if not cmds.objExists(colorShaderNodeName):
                        cmds.shadingNode('blinn', n=colorShaderNodeName, asShader=True)
                    #
                    sourceAttr2 = maUtils._toNodeAttr([colorShaderNodeName, 'outColor'])
                    cmds.connectAttr(sourceAttr2, targetAttr0)
                    # Bake the baseColor input network to a file texture.
                    if nodeType == 'aiStandardSurface':
                        inputAttr = maUtils._toNodeAttr([nodeName, 'baseColor'])
                        stringLis = maUtils.getInputAttrByAttr(inputAttr)
                        if stringLis:
                            textureNodeName = shadingEngine + 'CLS_color'
                            #
                            texture = texturePath + '/' + textureNodeName + '.jpg'
                            attr = stringLis[0]
                            cmds.convertSolidTx(
                                attr,
                                name=textureNodeName,
                                resolutionX=1024, resolutionY=1024,
                                samplePlane=1,
                                fileImageName=texture,
                                fileFormat='jpg'
                            )
                            #
                            cmds.connectAttr(textureNodeName + '.outColor', colorShaderNodeName + '.color')
| 36.878073 | 153 | 0.627916 | 0 | 0 | 0 | 0 | 195 | 0.005199 | 0 | 0 | 2,708 | 0.072204 |
9302303524c2d1d2dcea444d0b37ee6e17561f6a | 441 | py | Python | db/Service.py | hamedsh/healthCheck | 8f6b8ffffc1f1d8849a58b4966e54d30ead9556b | [
"Apache-2.0"
] | null | null | null | db/Service.py | hamedsh/healthCheck | 8f6b8ffffc1f1d8849a58b4966e54d30ead9556b | [
"Apache-2.0"
] | null | null | null | db/Service.py | hamedsh/healthCheck | 8f6b8ffffc1f1d8849a58b4966e54d30ead9556b | [
"Apache-2.0"
] | null | null | null | import json
class Service(object):
id: int = None
name: str = None
type: int = None
type_name: str = None
repeat_period: int = 5 # repeat period by second
metadata = {}
def __init__(self, arr: list):
self.id = arr[0]
self.name = arr[1]
self.type = arr[2]
self.type_name = arr[3]
self.repeat_period = arr[4]
self.metadata = json.loads(arr[5].replace("'", '"'))
| 25.941176 | 60 | 0.557823 | 428 | 0.970522 | 0 | 0 | 0 | 0 | 0 | 0 | 31 | 0.070295 |
9302d12e0eb5ea7aeef313eeca626e9071df7895 | 22,422 | py | Python | nbpipeline/rules.py | krassowski/nbpipeline | c2337db2b19767b2cdfcc9bf019e2bf687bb4423 | [
"MIT"
] | 16 | 2019-07-17T17:15:52.000Z | 2021-11-19T07:15:19.000Z | nbpipeline/rules.py | krassowski/nbpipeline | c2337db2b19767b2cdfcc9bf019e2bf687bb4423 | [
"MIT"
] | 5 | 2019-07-17T21:09:31.000Z | 2020-04-18T18:53:46.000Z | nbpipeline/rules.py | krassowski/nbpipeline | c2337db2b19767b2cdfcc9bf019e2bf687bb4423 | [
"MIT"
] | 2 | 2019-12-18T19:23:38.000Z | 2021-03-05T10:55:48.000Z | import json
import pickle
import re
from copy import copy, deepcopy
from functools import lru_cache
from json import JSONDecodeError
from os import system, walk, sep
from abc import ABC, abstractmethod
from pathlib import Path
import time
from subprocess import check_output
from tempfile import NamedTemporaryFile
from warnings import warn
from .utils import subset_dict_preserving_order, run_command, nice_time
class no_quotes(str):
    """A str whose repr omits the surrounding quote marks.

    Used when serializing CLI arguments so a value is rendered verbatim
    (e.g. ``1`` instead of ``'1'``).
    """
    def __repr__(self):
        quoted = super().__repr__()
        # Strip the first and last character — the quotes added by str.__repr__.
        return quoted[1:-1]
class Rule(ABC):
    """Abstract base for pipeline rules.

    Design principles (or how does it differ from snakemake):
    - fully python3; no strange make/python mishmash
    - prefer verbosity over ambiguity (named inputs/outputs)
    - Jupyter centered
    - interactive graphs
    - implicit passing of arguments to the executing command
    """
    # Shared locations, populated once by Rule.setup() before any run().
    cache_dir: Path
    tmp_dir: Path
    is_setup = False
    # Global registry of every instantiated rule, keyed by its unique name.
    rules = {}
    def __init__(self, name, **kwargs):
        """Create and globally register a rule.

        Notes:
        - input and output will be passed in the same order as it appears in kwargs
        - if the input is a dictionary, the keys will be interpreted as argument names;
          empty key can be used to insert a positional argument
        - the arguments will be serialized preserving the Python type, i.e.
            input={'name': 1}
          may result in:
            --name 1
          while:
            input={'name': "1"}
          would result in
            --name "1"
          You can force string to be displayed without quotes using:
            input={'name': no_quotes("1")}
        """
        # Rule names are globally unique — registering twice is a programming error.
        assert name not in self.rules
        self.name = name
        self.execution_time = None
        self.rules[name] = self
        # Reject unknown keyword arguments early (likely typos).
        extra_kwargs = set(kwargs) - {'output', 'input', 'group', 'parameters'}
        if extra_kwargs:
            raise Exception(f'Unrecognized keyword arguments to {self.__class__.__name__}: {extra_kwargs}')
        # Keep only the I/O-related kwargs, preserving their original order
        # (order matters when serializing command-line arguments).
        self.arguments = subset_dict_preserving_order(
            kwargs,
            {'input', 'output', 'parameters'}
        )
        self.group = kwargs.get('group', None)
        self.outputs = {}
        self.inputs = {}
        self.parameters = {}
        if 'output' in kwargs:
            output = kwargs['output']
            # todo support lists of positionals
            # Non-dict values become a single positional (empty-key) argument.
            self.outputs = output if isinstance(output, dict) else {'': output}
        if 'input' in kwargs:
            input = kwargs['input']
            self.inputs = input if isinstance(input, dict) else {'': input}
        if 'parameters' in kwargs:
            self.parameters = kwargs['parameters']
    @property
    def has_inputs(self):
        """True when at least one input argument was declared."""
        return len(self.inputs) != 0
    @property
    def has_outputs(self):
        """True when at least one output argument was declared."""
        return len(self.outputs) != 0
    @abstractmethod
    def run(self, use_cache: bool) -> int:
        """Execute the rule; subclasses must call super().run() first.

        Returns a status code (0 on success).
        """
        if not self.is_setup:
            raise ValueError('Please set up the rules class settings with Rule.setup() first!')
    @classmethod
    def setup(cls, cache_dir: Path, tmp_dir: Path):
        """Configure the shared cache/tmp locations; must precede any run()."""
        cls.cache_dir = Path(cache_dir)
        cls.tmp_dir = Path(tmp_dir)
        cls.is_setup = True
    @abstractmethod
    def to_json(self):
        """Serialize the rule for the pipeline graph front-end."""
        pass
    def __repr__(self):
        fragments = [repr(self.name)]
        if self.group:
            fragments.append(f'({self.group})')
        if self.has_inputs or self.has_outputs:
            fragments.append('with')
        if self.has_inputs:
            fragments.append(f'{len(self.inputs)} inputs')
        if self.has_inputs and self.has_outputs:
            fragments.append('and')
        if self.has_outputs:
            fragments.append(f'{len(self.outputs)} outputs')
        fragments = ' '.join(fragments)
        return f'<{self.__class__.__name__} {fragments}>'
class Group:
    """A group of rules, registered globally by id."""

    # Registry of all groups, keyed by group id.
    groups = {}

    def __init__(self, id: str, name: str, color='#cccccc', parent=None):
        """Create and register a group.

        Raises:
            AssertionError: if a group with the same ``id`` already exists.
        """
        # Fix: uniqueness must be checked on the registry key (``id``); the
        # previous check tested ``name`` against the id-keyed dict, letting a
        # duplicate id silently overwrite an existing group.
        assert id not in self.groups
        self.name = name
        self.id = id
        self.color = color
        self.groups[id] = self
        self.parent = parent

    def to_json(self):
        """Serialize for the pipeline graph front-end."""
        return {
            'label': self.name,
            'id': self.id,
            'color': self.color,
            'parent': self.parent
        }
class ShellRule(Rule):
    """
    A rule executed as a shell command.

    Named arguments will be passed in order,
    preceded with a single dash for single letter names
    or a double dash for longer names.
    """
    def __init__(self, name, command, **kwargs):
        """Register the rule and remember the shell command to execute."""
        # Fix: ``super().__init__`` already binds ``self``; passing it again
        # (``super().__init__(self, name, ...)``) made every ShellRule
        # construction raise TypeError.
        super().__init__(name, **kwargs)
        self.command = command

    def serialize(self, arguments_group):
        """Render one arguments group as a command-line fragment.

        Dicts become ``-k value`` / ``--key value`` pairs (empty key gives a
        positional); other values are rendered with repr() so Python types
        (and no_quotes) control the quoting.
        """
        if isinstance(arguments_group, dict):
            return ' '.join(
                (
                    (
                        ('-' + key if len(key) == 1 else '--' + key)
                        +
                        ' '
                    )
                    if len(key) else
                    ''
                ) + (
                    repr(value)
                )
                for key, value in arguments_group.items()
            )
        else:
            return repr(arguments_group)

    @property
    def serialized_arguments(self):
        """All argument groups serialized and joined with spaces."""
        return ' '.join({
            self.serialize(arguments_group)
            for arguments_group in self.arguments.values()
        })

    def run(self, use_cache=False) -> int:
        """Execute the command, recording wall-clock execution time."""
        super().run(use_cache)
        start_time = time.time()
        status = system(f'{self.command} {self.serialized_arguments}')
        self.execution_time = time.time() - start_time
        return status

    def to_json(self):
        """Serialize for the pipeline graph front-end."""
        return {
            'name': self.command,
            'arguments': self.serialized_arguments,
            'execution_time': self.execution_time,
            'type': 'shell'
        }
def expand_run_magics(notebook):
    """Return a shallow copy of ``notebook`` with ``%run`` magics inlined.

    Every ``%run path`` line in a code cell is replaced by the cells of the
    referenced notebook; plain code lines around the magic are preserved as
    separate cells modelled on the original one. Cells without ``%run`` are
    kept untouched.
    """
    expanded = copy(notebook)
    new_cells = []

    def flush(template, pending):
        # Emit accumulated plain code lines as a cell modelled on ``template``.
        if pending:
            fragment = copy(template)
            fragment['source'] = pending
            new_cells.append(fragment)

    for cell in notebook['cells']:
        has_run_magic = cell['cell_type'] == 'code' and any(
            line.startswith('%run') for line in cell['source']
        )
        if not has_run_magic:
            new_cells.append(cell)
            continue
        pending = []
        for line in cell['source']:
            if line.startswith('%run'):
                flush(cell, pending)
                pending = []
                to_include = line[5:].strip()
                with open(to_include) as handle:
                    new_cells.extend(json.load(handle)['cells'])
            else:
                pending.append(line)
        flush(cell, pending)
    expanded['cells'] = new_cells
    return expanded
class NotebookRule(Rule):
    """A rule that executes a Jupyter notebook via papermill and diffs the result."""
    # NOTE(review): annotated as ``None`` (not Optional[...]) and never assigned
    # in the visible code — presumably set externally; confirm.
    options: None
    @property
    def output_nb_dir(self) -> Path:
        """Directory for executed notebook copies."""
        return self.tmp_dir / 'out'
    @property
    def reference_nb_dir(self) -> Path:
        """Directory for parameter-injected reference copies (the diff baseline)."""
        return self.tmp_dir / 'ref'
    @property
    def stripped_nb_dir(self) -> Path:
        """Directory for output-stripped notebook copies fed to papermill."""
        return self.tmp_dir / 'stripped'
    def __init__(
        self, *args, notebook,
        diff=True,
        deduce_io=True,
        deduce_io_from_data_vault=True,
        execute=True,
        **kwargs
    ):
        """Rule for Jupyter Notebooks

        Args:
            deduce_io: whether to automatically deduce inputs and outputs from the code cells tagged "inputs" and "outputs";
                local variables defined in the cell will be evaluated and used as inputs or outputs.
                If you want to generate paths with a helper function for brevity, assign a dict of {variable: path}
                to `__inputs__`/`__outputs__` in the tagged cell using `io.create_paths()` helper.
            diff: whether to generate diffs against the current state of the notebook
            deduce_io_from_data_vault: whether to deduce the inputs and outputs from `data_vault` magics
                (`%vault store` and `%vault import`), see https://github.com/krassowski/data-vault
            execute: if False, the notebook will not be run; useful to include final "leaf" notebooks
                which may take too long to run, but are not essential to the overall results
        """
        super().__init__(*args, **kwargs)
        self.todos = []            # lines containing 'TODO', collected in run()
        self.notebook = notebook
        self.absolute_notebook_path = Path(notebook).absolute()
        self.generate_diff = diff
        self.diff = None           # structured nbdiff result (parsed JSON)
        self.text_diff = None      # HTML-converted textual nbdiff
        self.fidelity = None       # % of cells unchanged vs the reference run
        self.images = []           # base64 PNG outputs found in the notebook
        self.headers = []          # markdown headers, collected in run()
        self.status = None         # last papermill exit status
        self.execute = execute
        from datetime import datetime, timedelta
        month_ago = (datetime.today() - timedelta(days=30)).timestamp()
        # Number of commits touching this notebook in the last month (via git).
        self.changes = run_command(f'git rev-list --max-age {month_ago} HEAD --count {self.notebook}')
        if deduce_io:
            self.deduce_io_from_tags()
        if deduce_io_from_data_vault:
            self.deduce_io_from_data_vault()
    def deduce_io_from_data_vault(self):
        """Scan code cells for ``%vault`` magics and register their paths as I/O.

        ``%vault import`` lines become inputs and ``%vault store`` lines become
        outputs, keyed by (cell_index, variable_index). Imports of a path that
        this notebook itself stored earlier are skipped to avoid cycles.
        """
        notebook_json = self.notebook_json
        stored = set()
        for index, cell in enumerate(notebook_json['cells']):
            if 'source' not in cell:
                continue
            for line in cell['source']:
                if line.startswith('%vault'):
                    try:
                        from data_vault import VaultMagics
                        from data_vault.actions import ImportAction, StoreAction
                        from data_vault.parsing import split_variables, unquote
                    except ImportError:
                        warn('Could not deduce I/O from data-vault %vault magics: data_vault not installed')
                        return
                    vault_magics = VaultMagics()
                    # Skip the leading '%vault ' (7 characters) before parsing.
                    arguments = vault_magics.extract_arguments(line[7:])
                    action = vault_magics.select_action(arguments)
                    if isinstance(action, ImportAction):
                        variables = arguments['import']
                        for var_index, variable in enumerate(split_variables(variables)):
                            if 'from' in arguments:
                                import_path = arguments['from'] + '/' + variable
                            else:
                                import_path = unquote(arguments['import'])
                            if import_path in stored:
                                warn(f'Skipping {line} which was previously stored from this notebook to avoid cycles')
                            else:
                                self.inputs[(index, var_index)] = import_path
                    elif isinstance(action, StoreAction):
                        variables = split_variables(arguments['store'])
                        if 'as' in arguments:
                            assert len(variables) == 1
                            variables = [arguments['as']]
                        for var_index, variable in enumerate(variables):
                            store_path = arguments['in'] + '/' + variable
                            self.outputs[(index, var_index)] = store_path
                            stored.add(store_path)
    def deduce_io_from_tags(self, io_tags={'inputs', 'outputs'}):
        """Deduce inputs/outputs from cells tagged 'inputs'/'outputs'.

        NOTE(review): the mutable default argument is only read (intersection),
        never mutated, so it is harmless here.
        """
        notebook_json = self.notebook_json
        io_cells = {}
        for index, cell in enumerate(notebook_json['cells']):
            if 'tags' in cell['metadata']:
                cell_io_tags = io_tags.intersection(cell['metadata']['tags'])
                if cell_io_tags:
                    # A cell may carry at most one of the I/O tags.
                    assert len(cell_io_tags) == 1
                    io_cells[list(cell_io_tags)[0]] = cell, index
        for io, (cell, index) in io_cells.items():
            # Tag-deduced I/O must not clash with explicitly declared I/O.
            assert not getattr(self, f'has_{io}')
            source = ''.join(cell['source'])
            if f'__{io}__' in source:
                assert len(cell['outputs']) == 1
                # TODO: search through lists
                values = cell['outputs'][0]['metadata']
            else:
                # so we don't want to use eval (we are not within an isolated copy yet!),
                # thus only simple regular expression matching which will fail on multi-line strings
                # (and anything which is dynamically generated)
                assignments = {
                    match.group('key'): match.group('value')
                    for match in re.finditer(r'^\s*(?P<key>.*?)\s*=\s*([\'"])(?P<value>.*)\2', source, re.MULTILINE)
                }
                values = {
                    key: value
                    for key, value in assignments.items()
                    if key.isidentifier() and value
                }
                if len(assignments) != len(values):
                    # TODO: add nice exception or warning
                    # NOTE(review): a bare ``raise`` with no active exception
                    # raises RuntimeError here.
                    raise
            setattr(self, io, values)
    def serialize(self, arguments_group):
        """Render one arguments group as papermill ``-p key value`` parameters."""
        return '-p ' + (' -p '.join(
            f'{key} {value}'
            for key, value in arguments_group.items()
        ))
    @property
    def serialized_arguments(self):
        """All non-empty argument groups serialized and joined with spaces."""
        return ' '.join({
            self.serialize(arguments_group)
            for arguments_group in self.arguments.values()
            if arguments_group
        })
    def outline(self, max_depth=3):
        """Return collected markdown headers (``max_depth`` is currently unused)."""
        return self.headers
    @property
    @lru_cache()
    def notebook_json(self):
        """The notebook parsed from disk, with %run magics expanded (cached).

        NOTE(review): ``lru_cache`` on a method keeps every instance alive for
        the cache's lifetime, and ``run()`` mutates the cached dict in place —
        consider ``functools.cached_property`` instead.
        """
        with open(self.absolute_notebook_path) as f:
            return expand_run_magics(json.load(f))
    def maybe_create_output_dirs(self):
        """Create the parent directory of every declared output path if missing."""
        if self.has_outputs:
            for name, output in self.outputs.items():
                path = Path(output)
                path = path.parent
                if not path.exists():
                    print(f'Creating path "{path}" for "{name}" output argument')
                    path.mkdir(parents=True, exist_ok=True)
    def run(self, use_cache=True) -> int:
        """
        Run JupyterNotebook using PaperMill and compare the output with reference using nbdime

        Returns: status code from the papermill run (0 if successful)
        """
        super().run(use_cache)
        notebook = self.notebook
        path = Path(notebook)
        # Mirror the notebook's directory layout under out/ref/stripped.
        output_nb_dir = self.output_nb_dir / path.parent
        output_nb_dir.mkdir(parents=True, exist_ok=True)
        reference_nb_dir = self.reference_nb_dir / path.parent
        reference_nb_dir.mkdir(parents=True, exist_ok=True)
        stripped_nb_dir = self.stripped_nb_dir / path.parent
        stripped_nb_dir.mkdir(parents=True, exist_ok=True)
        output_nb = output_nb_dir / path.name
        reference_nb = reference_nb_dir / path.name
        stripped_nb = stripped_nb_dir / path.name
        # Cache key: md5 of the notebook file content.
        md5 = run_command(f'md5sum {str(self.absolute_notebook_path)}').split()[0]
        cache_dir = self.cache_dir / path.parent
        cache_dir.mkdir(parents=True, exist_ok=True)
        cache_nb_file = cache_dir / f'{md5}.json'
        to_cache = ['execution_time', 'fidelity', 'diff', 'text_diff', 'todos', 'headers', 'images']
        if use_cache and cache_nb_file.exists():
            with open(cache_nb_file, 'rb') as f:
                pickled = pickle.load(f)
                print(f'Reusing cached results for {self}')
                for key in to_cache:
                    setattr(self, key, pickled[key])
            return 0
        notebook_json = self.notebook_json
        # Collect embedded PNG outputs for the graph front-end.
        self.images = [
            output['data']['image/png']
            for cell in notebook_json['cells']
            for output in cell.get('outputs', [])
            if 'data' in output and 'image/png' in output['data']
        ]
        self.headers = []
        for cell in notebook_json['cells']:
            if cell['cell_type'] == 'markdown':
                for line in cell['source']:
                    if line.startswith('#'):
                        self.headers.append(line)
        for cell in notebook_json['cells']:
            for line in cell.get('source', ''):
                if 'TODO' in line:
                    self.todos.append(line)
        # strip outputs (otherwise if it stops, the diff will be too optimistic)
        notebook_stripped = deepcopy(notebook_json)
        # NOTE(review): the deepcopy above is taken *before* outputs are
        # cleared, so the loop below strips the cached ``notebook_json``
        # (mutating it in place) while the file written as ``stripped_nb``
        # still contains the outputs — presumably the two were meant to be
        # the other way round; confirm.
        for cell in notebook_json['cells']:
            cell['outputs'] = []
        with open(stripped_nb, 'w') as f:
            json.dump(notebook_stripped, f)
        if self.execute:
            # execute
            start_time = time.time()
            status = system(f'papermill {stripped_nb} {output_nb} {self.serialized_arguments}') or 0
            self.execution_time = time.time() - start_time
        else:
            status = 0
            warn(f'Skipping {self} (execute != True)')
        if self.execute and self.generate_diff:
            # inject parameters to a "reference" copy (so that we do not have spurious noise in the diff)
            system(
                f'papermill {self.absolute_notebook_path} {reference_nb} {self.serialized_arguments} --prepare-only'
                # do not print "Input Notebook:" and "Output Notebook:" for the second time
                ' --log-level WARNING'
            )
            with NamedTemporaryFile(delete=False) as tf:
                command = f'nbdiff {reference_nb} {output_nb} --ignore-metadata --ignore-details --out {tf.name}'
                result = run_command(command)
            with open(tf.name) as f:
                try:
                    self.diff = json.load(f)
                except JSONDecodeError as e:
                    warn(f'Could not load the diff file: {result}, {f.readlines()}')
            command = f'nbdiff {reference_nb} {output_nb} --ignore-metadata --ignore-details --no-use-diff --no-git'
            self.text_diff = run_command(command)
            from ansi2html import Ansi2HTMLConverter
            conv = Ansi2HTMLConverter()
            self.text_diff = conv.convert(self.text_diff)
            changes = len(self.diff[0]['diff']) if self.diff else 0
            # TODO: count only the code cells, not markdown cells?
            total_cells = len(notebook_json['cells'])
            self.fidelity = (total_cells - changes) / total_cells * 100
        if status == 0:
            with open(cache_nb_file, 'wb') as f:
                pickle.dump({
                    key: getattr(self, key)
                    for key in to_cache
                }, f)
        self.status = status
        return status
    def to_json(self):
        """Serialize run results and metadata for the pipeline graph front-end."""
        notebook_name = Path(self.notebook).name
        return {
            'name': self.name,
            'arguments': self.serialized_arguments,
            'execution_time': self.execution_time,
            'type': 'notebook',
            'notebook': self.notebook,
            'notebook_name': notebook_name,
            'fidelity': self.fidelity,
            'changes_this_month': self.changes,
            'nice_time': nice_time(self.execution_time),
            'diff': self.diff,
            'text_diff': self.text_diff,
            'images': self.images,
            'label': self.notebook,
            'headers': self.headers,
            'status': self.status,
            'todos': self.todos,
            'group': self.group
            # TODO: requires testing
            # 'is_tracked': is_tracked_in_version_control(self.notebook)
        }
    def to_graphiz(self, changes=False):
        """Serialize for graphviz rendering (HTML-like table label).

        NOTE(review): ``self.repository_url`` is referenced here but never set
        in the visible code — presumably assigned externally; confirm. Also
        ``buttons_html`` is computed but never interpolated into the label.
        """
        data = self.to_json()
        # TODO: move to static_graph
        buttons = []
        if changes:  # TODO allow to activate
            buttons += [f'<td href="{self.repository_url}/commits/master/{self.notebook}">{self.changes} changes this month</td>']
        if self.fidelity is not None:
            buttons += [f'<td href="">Reproducibility: {self.fidelity:.2f}%</td>']
        if self.execution_time is not None:
            buttons += [f'<td>Runtime: {nice_time(self.execution_time)}</td>']
        buttons_html = '\n'.join(buttons)
        if buttons_html:
            buttons_html = f'<tr>{ buttons_html }</tr>'
        return {
            **data,
            **{
                'shape': 'plain',
                'label': f"""<<table cellspacing="0">
                    <tr><td href="{self.repository_url}/blob/master/{self.notebook}" colspan="{len(buttons)}" title="{data['notebook_name']}">{self.name.replace('&', ' and ')}</td></tr>
                    </table>>"""
            }
        }
def is_tracked_in_version_control(file: str):
return check_output(f'git ls-files {file}', shell=True)
def discover_notebooks(root_path='.', ignore=None, ignored_dirs=None, only_tracked_in_git=False, ignore_prefixes=('__', '.')):
"""Useful when working with input/output auto-detection"""
ignored_dirs = ignored_dirs or set()
ignore = ignore or set()
names = {}
rules = []
from typing import Dict
groups: Dict[str, Group] = {}
for dirpath, _, files in walk(root_path):
dirs = dirpath.split(sep)[1:]
if any(dir.startswith('.') or dir in ignored_dirs for dir in dirs):
continue
for file in files:
if any(file.startswith(prefix) for prefix in ignore_prefixes):
continue
if not file.endswith('.ipynb'):
continue
if only_tracked_in_git and not is_tracked_in_version_control(file):
continue
path = sep.join(dirs + [file])
if path in ignore:
continue
name = file[:-6]
name = name[0] + name[1:].replace('_', ' ')
if name in names:
print(name, 'already registered', path, names[name])
else:
names[name] = path
group_id = sep.join(dirs) if dirs else None
rule = NotebookRule(name, notebook=path, group=group_id)
rules.append(rule)
if group_id and group_id not in groups:
groups[group_id] = Group(id=group_id, name=dirs[-1], parent=sep.join(dirs[:-1]))
return {
'rules': rules,
'groups': groups
}
| 36.281553 | 181 | 0.550085 | 19,145 | 0.853849 | 0 | 0 | 1,365 | 0.060878 | 0 | 0 | 5,900 | 0.263134 |
9303234b67903102cb5d3258f289b230610a2cf6 | 801 | py | Python | analyse_images.py | aalto-ui/SemanticCollage | 61b4f241aef6c029a634f0d70a2a35799db46076 | [
"MIT"
] | null | null | null | analyse_images.py | aalto-ui/SemanticCollage | 61b4f241aef6c029a634f0d70a2a35799db46076 | [
"MIT"
] | 4 | 2021-06-08T21:23:18.000Z | 2022-03-12T00:25:45.000Z | analyse_images.py | aalto-ui/SemanticCollage | 61b4f241aef6c029a634f0d70a2a35799db46076 | [
"MIT"
] | 1 | 2021-04-21T03:41:32.000Z | 2021-04-21T03:41:32.000Z | # encoding=utf8
from load_to_db import *
save_list = []
import random
import string
def randomString(url, stringLength=20):
"""Generate a random string of fixed length """
Letters = string.ascii_lowercase + string.ascii_uppercase + string.digits
url_split = url.split(".")
format_ = url_split[-1]
s = ''.join(random.choice(Letters) for i in range(stringLength)) + format_
if alreadyNameInDB(s):
randomString(url)
else:
return s
def alreadyNameInDB(name):
connection = psycopg2.connect(settings.DATABASE_CONNECTION_STRING)
# connection = psycopg2.connect(database='Inspiration', user='research')
cur = connection.cursor()
cur.execute("SELECT name FROM images WHERE name = '%s';".format(name, ))
return cur.fetchone() is not None
| 25.83871 | 78 | 0.694132 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 183 | 0.228464 |
93034a9a6233faadc660e34f44294b994aedc83c | 124 | py | Python | Eolymp_DSAWeek1_Solns/Digits.py | sulphatet/MiniProjectsndCodingProbs | a8f6c9c3993854cad9444800f9d1b1720a239b39 | [
"MIT"
] | 1 | 2022-02-24T08:29:00.000Z | 2022-02-24T08:29:00.000Z | Eolymp_DSAWeek1_Solns/Digits.py | sulphatet/MiniProjectsndCodingProbs | a8f6c9c3993854cad9444800f9d1b1720a239b39 | [
"MIT"
] | null | null | null | Eolymp_DSAWeek1_Solns/Digits.py | sulphatet/MiniProjectsndCodingProbs | a8f6c9c3993854cad9444800f9d1b1720a239b39 | [
"MIT"
] | null | null | null | num = int(input())
x = 0
if num == 0:
print(1)
exit()
while num != 0:
x +=1
num = num//10
print(x)
| 12.4 | 19 | 0.435484 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
9304179c9a7ef19d7cafd259a2cdab5634a69a04 | 786 | py | Python | stable_baselines_model_based_rl/wrapper/gym_step_handlers/continuous_mountain_car.py | micheltokic/stable_baselines_model_based_rl | 75bac906aeba69072878ceb15d9be459b1f436c3 | [
"Apache-2.0"
] | 1 | 2022-01-08T17:08:13.000Z | 2022-01-08T17:08:13.000Z | stable_baselines_model_based_rl/wrapper/gym_step_handlers/continuous_mountain_car.py | micheltokic/stable_baselines_model_based_rl | 75bac906aeba69072878ceb15d9be459b1f436c3 | [
"Apache-2.0"
] | 5 | 2021-09-15T18:14:48.000Z | 2021-09-19T16:17:51.000Z | stable_baselines_model_based_rl/wrapper/gym_step_handlers/continuous_mountain_car.py | micheltokic/stable_baselines_model_based_rl | 75bac906aeba69072878ceb15d9be459b1f436c3 | [
"Apache-2.0"
] | null | null | null | import math
from stable_baselines_model_based_rl.wrapper.step_handler import StepRewardDoneHandler
class ContinuousMountainCarStepHandler(StepRewardDoneHandler):
goal_position = 0.45
goal_velocity = 0
def get_done(self, step: int) -> bool:
s = self.observation.to_value_list()
position = s[0]
velocity = s[1]
# Convert a possible numpy bool to a Python bool.
return bool(
position >= self.goal_position and velocity >= self.goal_velocity
)
def get_reward(self, step: int) -> float:
action = self.action.to_value_list()
reward = 0
if self.get_done(step):
reward = 100.0
reward -= math.pow(action[0], 2) * 0.1
return reward
| 27.103448 | 87 | 0.614504 | 677 | 0.861323 | 0 | 0 | 0 | 0 | 0 | 0 | 50 | 0.063613 |
9304861c31e91f902b6f121f6ca51fab98031cbe | 3,305 | py | Python | src/mod_stats_by_aircraft/background_jobs/background_job.py | FGlazov/IL2Stats_ByAircraftMod | acf66eb3f31a7e789d61a5c60d32fd30dbcedf4b | [
"MIT"
] | null | null | null | src/mod_stats_by_aircraft/background_jobs/background_job.py | FGlazov/IL2Stats_ByAircraftMod | acf66eb3f31a7e789d61a5c60d32fd30dbcedf4b | [
"MIT"
] | null | null | null | src/mod_stats_by_aircraft/background_jobs/background_job.py | FGlazov/IL2Stats_ByAircraftMod | acf66eb3f31a7e789d61a5c60d32fd30dbcedf4b | [
"MIT"
] | null | null | null | from django.core.exceptions import FieldError
from django.db import ProgrammingError
from stats.models import Tour, Sortie
from django.db.models import Max
import config
RETRO_COMPUTE_FOR_LAST_TOURS = config.get_conf()['stats'].getint('retro_compute_for_last_tours')
if RETRO_COMPUTE_FOR_LAST_TOURS is None:
RETRO_COMPUTE_FOR_LAST_TOURS = 10
def get_tour_cutoff():
max_id = Tour.objects.aggregate(Max('id'))['id__max']
if max_id is None: # Edge case: No tour yet
return None
return max_id - RETRO_COMPUTE_FOR_LAST_TOURS
class BackgroundJob:
def __init__(self):
tour_cutoff = get_tour_cutoff()
try:
if tour_cutoff is not None:
self.work_left = self.query_find_sorties(get_tour_cutoff()).count() > 0
else:
self.work_left = False
except FieldError:
pass # Likely that update.cmd is running. Otherwise this will cause another error later on.
except ProgrammingError:
pass # Likely that update.cmd is running. Otherwise this will cause another error later on.
self.unlimited_work = False # Marker for a continuous job which always gets extra work.
"""Abstract class which represents a job to be done in the background in stats.cmd while there is no new mission
to be processed. This includes fixing corrupted data due to bugs, and retroactively computing aircraft stats, as
well as filling in missing fields which were added in an update to the aircraft stats system.
"""
def query_find_sorties(self, tour_cutoff):
"""
Finds the sorties which need to be worked on.
@param tour_cutoff The first tour that should be searched.
@returns A django QuerySet which will find all the Sorties which need to be processed for this job.
"""
print("[mod_stats_by_aircraft]: WARNING: Programing Error unimplemented background job query find.")
return Sortie.objects.none()
def compute_for_sortie(self, sortie):
"""
Does the necessary computations on a single sortie found by query_find_sorties.
@param sortie Sortie as found by query_find_sorties.
"""
print("[mod_stats_by_aircraft]: WARNING: Programing Error unimplemented background job one sortie.")
def log_update(self, to_compute):
"""
Message which shows a status update on how many sorties left to compute for this job.
Printed to stats.cmd output.
@param to_compute Nr of sorties found by compute_for_sortie method.
"""
return "[mod_stats_by_aircraft]: WARNING: Programming error, unimplemented logs starting method."
def log_done(self):
"""
Message shown when this job is done. Printed to stats.cmd output.
"""
return "[mod_stats_by_aircraft]: WARNING: Programming error, unimplemented logs done method."
def reset_relevant_fields(self, tour_cutoff):
"""
Optional method.
This is a job done before any new sortie is processed. Used in corrupted data fixing background jobs which have
to make sure that data is reset before a new mission is processed in, so that the relevant fields in new mission
are at least correct.
"""
pass
| 39.819277 | 120 | 0.69289 | 2,753 | 0.83298 | 0 | 0 | 0 | 0 | 0 | 0 | 2,036 | 0.616036 |
9306105a7d6f94715dda6856227c83b2b5fdd102 | 2,016 | py | Python | peakinvestigator/actions/run.py | jct197/PeakInvestigator-Python-SDK | 75d58d67208deff07b6e366e0b34d37570b46bf5 | [
"BSD-3-Clause"
] | null | null | null | peakinvestigator/actions/run.py | jct197/PeakInvestigator-Python-SDK | 75d58d67208deff07b6e366e0b34d37570b46bf5 | [
"BSD-3-Clause"
] | 4 | 2019-03-14T03:01:05.000Z | 2019-10-16T20:51:03.000Z | peakinvestigator/actions/run.py | jct197/PeakInvestigator-Python-SDK | 75d58d67208deff07b6e366e0b34d37570b46bf5 | [
"BSD-3-Clause"
] | 1 | 2019-01-05T03:50:02.000Z | 2019-01-05T03:50:02.000Z | ## -*- coding: utf-8 -*-
#
# Copyright (c) 2016, Veritomyx, Inc.
#
# This file is part of the Python SDK for PeakInvestigator
# (http://veritomyx.com) and is distributed under the terms
# of the BSD 3-Clause license.
from .base import BaseAction
class RunAction(BaseAction):
"""This class is used to make a RUN call to the PeakInvestigator
API. See https://peakinvestigator.veritomyx.com/api/#RUN.
It is constructed with a Fluent API because of the number of required
arguments.
"""
def __init__(self, version, username, password, jobID,
response_time_objective):
"""Constructor
"""
super(RunAction,self).__init__(version, username, password)
self._jobID = jobID
self._response_time_objective = response_time_objective
def with_files(self, *args, **kwds):
"""Specify the production and calibration data files using either
function arguments or keywords.
First try keywords. If those are missing, use args[0] for production and
args[1] for calibration, if it exists.
"""
if "production" in kwds:
self._production = kwds["production"]
else:
self._production = args[0]
if "calibration" in kwds:
self._calibration = kwds["calibration"]
elif len(args) == 2:
self._calibration = args[1]
return self
def build_query(self):
query = super(RunAction,self).build_query()
query["Action"] = "RUN"
query["Job"] = self._jobID
query["RTO"] = self._response_time_objective
query["InputFile"] = self._production
if hasattr(self, "_calibration"):
query["CalibrationFile"] = self._calibration
return query
@property
def job(self):
super(RunAction,self).precheck()
return self._data["Job"]
| 28 | 80 | 0.590278 | 1,762 | 0.874008 | 0 | 0 | 102 | 0.050595 | 0 | 0 | 862 | 0.427579 |
93067822138f982e5bd62fcfd10cc9b4ae612b62 | 3,757 | py | Python | .setup/bin/input_forum_data.py | zeez2030/Submitty | 7118944ff4adc6f15d76984eb10a1e862926d724 | [
"BSD-3-Clause"
] | 411 | 2016-06-14T20:52:25.000Z | 2022-03-31T21:20:25.000Z | .setup/bin/input_forum_data.py | KaelanWillauer/Submitty | cf9b6ceda15ec0a661e2ca81ea7864790094c64a | [
"BSD-3-Clause"
] | 5,730 | 2016-05-23T21:04:32.000Z | 2022-03-31T10:08:06.000Z | .setup/bin/input_forum_data.py | KaelanWillauer/Submitty | cf9b6ceda15ec0a661e2ca81ea7864790094c64a | [
"BSD-3-Clause"
] | 423 | 2016-09-22T21:11:30.000Z | 2022-03-29T18:55:28.000Z | #!/usr/bin/env python3
import os
import sys
import json
from datetime import datetime
from submitty_utils import dateutils
def generatePossibleDatabases():
current = dateutils.get_current_semester()
pre = 'submitty_' + current + '_'
path = "/var/local/submitty/courses/" + current
return [pre + name for name in sorted(os.listdir(path)) if os.path.isdir(path + "/" + name)]
if(__name__ == "__main__"):
num_args = len(sys.argv)
possible_databases = generatePossibleDatabases()
database = possible_databases[0]
if(num_args > 2):
print('Too many arguments. Use --help for help.')
sys.exit()
elif(num_args == 2):
if(sys.argv[1] == '--help' or sys.argv[1] == '-h'):
print('This tool can be used to test forum scalability -- pg_dump after execution to save the test data which can be sourced later.')
print('This tool takes in an optional argument: database, so an example usage is: `python3 input_forum_data.py submitty_f18_blank`')
print('Note this will delete forum data in the database you specify. The database will default to `submitty_f18_blank` if not specified.')
sys.exit()
elif(sys.argv[1] not in possible_databases):
print('Unknown argument: {:s}, use --help or -h for help.'.format(sys.argv[1]))
sys.exit()
database = sys.argv[1]
threads = abs(int(input("Enter number of threads (i.e. 1000): ").strip()))
posts = abs(int(input("Enter number of posts per thread (i.e. 20): ").strip()))
usr_path = "/usr/local/submitty"
settings = json.load(open(os.path.join(usr_path, ".setup", "submitty_conf.json")))
print("WARNING: This tool is going to delete data from the following tables:\n\tthreads\n\tposts\n\tforum_posts_history\n\tstudent_favorites\n\tviewed_responses\n\tthread_categories\n\tcategories_list")
answer = input("Do you agree for this data to be removed from {:s}? [yes/no]: ".format(database)).strip()
if(answer.lower() != "yes"):
print("Exiting...")
sys.exit()
variables = (settings['database_password'], settings['database_host'], settings['database_user'], database)
os.system("""PGPASSWORD='{}' psql --host={} --username={} --dbname={} -c \"TRUNCATE TABLE threads RESTART IDENTITY CASCADE\" > /dev/null""".format(*variables))
os.system("""PGPASSWORD='{}' psql --host={} --username={} --dbname={} -c \"DELETE FROM thread_categories\" > /dev/null""".format(*variables))
os.system("""PGPASSWORD='{}' psql --host={} --username={} --dbname={} -c \"TRUNCATE TABLE categories_list RESTART IDENTITY CASCADE\" > /dev/null""".format(*variables))
os.system("""PGPASSWORD='{}' psql --host={} --username={} --dbname={} -c \"INSERT INTO categories_list (category_desc) VALUES ('TESTDATA')\" > /dev/null""".format(*variables))
print()
for i in range(threads):
if((i+1) % 10 == 0):
print("Completed: {:d}/{:d}".format(i+1, threads))
os.system("""PGPASSWORD='{}' psql --host={} --username={} --dbname={} -c \"INSERT INTO threads (title, created_by, pinned, deleted, merged_thread_id, merged_post_id, is_visible) VALUES (\'{:s}\', \'{:s}\', false, false, -1, -1, true)\" > /dev/null""".format(*variables, "Thread{:d}".format(i+1), "aphacker"))
os.system("""PGPASSWORD='{}' psql --host={} --username={} --dbname={} -c \"INSERT INTO thread_categories (thread_id, category_id) VALUES ({:d}, 1)\" > /dev/null""".format(*variables, i+1))
for pid in range(posts):
os.system("""PGPASSWORD='{}' psql --host={} --username={} --dbname={} -c \"INSERT INTO posts (thread_id, parent_id, author_user_id, content, timestamp, anonymous, deleted, type, has_attachment) VALUES ({}, {}, {}, {}, \'{}\', false, false, 0, false)\" > /dev/null""".format(*variables, i+1, -1 if pid == 0 else i*posts + pid, "'aphacker'", "'Post{:d}'".format(i*posts + pid+1), datetime.now()))
| 51.465753 | 398 | 0.672345 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,265 | 0.602875 |
93074d51c0af568361ff79fea1825596f6e8553e | 252 | py | Python | edbdeploy/spec/__init__.py | mw2q/postgres-deployment | 7b07375588da6ede25062c5518922275ed35844b | [
"BSD-3-Clause"
] | null | null | null | edbdeploy/spec/__init__.py | mw2q/postgres-deployment | 7b07375588da6ede25062c5518922275ed35844b | [
"BSD-3-Clause"
] | 1 | 2021-07-01T18:02:00.000Z | 2021-07-01T18:02:00.000Z | edbdeploy/spec/__init__.py | jt-edb/postgres-deployment | 871cf517379152f096f44d682dd03d93971715a2 | [
"BSD-3-Clause"
] | null | null | null | class SpecValidator:
def __init__(self, type=None, default=None, choices=[], min=None,
max=None):
self.type = type
self.default = default
self.choices = choices
self.min = min
self.max = max
| 28 | 69 | 0.559524 | 251 | 0.996032 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
930766e7b8ffd5a77cf2414fe0de6b57a69af041 | 48 | py | Python | levelpy/async/__init__.py | rch/levelpy | 59e546854b3d478f3467bd573eb5f6b4da62d239 | [
"MIT"
] | 4 | 2015-11-06T22:50:22.000Z | 2020-05-31T14:49:58.000Z | levelpy/async/__init__.py | rch/levelpy | 59e546854b3d478f3467bd573eb5f6b4da62d239 | [
"MIT"
] | null | null | null | levelpy/async/__init__.py | rch/levelpy | 59e546854b3d478f3467bd573eb5f6b4da62d239 | [
"MIT"
] | 3 | 2017-01-25T22:26:40.000Z | 2021-03-24T07:49:33.000Z | #
# levelpy/async/__init__.py
#
import asyncio
| 8 | 27 | 0.729167 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 29 | 0.604167 |
9308413560c1330a48aacec8d18de1204b19be47 | 751 | py | Python | app/domain/messaging/tests/test_models.py | anthon-alindada/sanic_messaging | 2afbc601790b4a3dbe17e0a95c589412250d8bee | [
"MIT"
] | 1 | 2019-06-21T08:51:01.000Z | 2019-06-21T08:51:01.000Z | app/domain/messaging/tests/test_models.py | anthon-alindada/sanic_messaging | 2afbc601790b4a3dbe17e0a95c589412250d8bee | [
"MIT"
] | 6 | 2021-03-09T07:47:26.000Z | 2022-02-26T11:33:25.000Z | app/domain/messaging/tests/test_models.py | anthon-alindada/sanic_messaging | 2afbc601790b4a3dbe17e0a95c589412250d8bee | [
"MIT"
] | null | null | null | # -*- coding: utf-8
# Models
from ..models import Channel, ChannelUser, Message
async def test_channel_model(channel_data):
channel = Channel(
owner_id=1,
name='General')
channel = await channel.create()
assert repr(channel) == "<Channel: 'General'>"
async def test_channel_user_model(channel_data):
channel_user = ChannelUser(
user_id=1,
channel_id=1)
channel_user = await channel_user.create()
assert repr(channel_user) == "<ChannelUser: 1 1>"
async def test_message_model(message_data):
message = Message(
author_id=1,
channel_id=1,
content='General')
message = await message.create()
assert repr(message) == "<Message: {}>".format(message.id)
| 21.457143 | 62 | 0.653795 | 0 | 0 | 0 | 0 | 0 | 0 | 662 | 0.881491 | 102 | 0.135819 |
93095d98ffed279b01860ac139ac53ed5312a8e1 | 206 | py | Python | aiosnow/models/__init__.py | michaeldcanady/aiosnow | db515b1560d651fc7696a184990c2a2d68db8961 | [
"MIT"
] | 38 | 2020-08-03T17:58:48.000Z | 2022-03-30T19:39:24.000Z | aiosnow/models/__init__.py | michaeldcanady/aiosnow | db515b1560d651fc7696a184990c2a2d68db8961 | [
"MIT"
] | 34 | 2020-01-20T10:11:46.000Z | 2020-06-05T21:25:23.000Z | aiosnow/models/__init__.py | michaeldcanady/aiosnow | db515b1560d651fc7696a184990c2a2d68db8961 | [
"MIT"
] | 5 | 2021-03-26T19:35:20.000Z | 2022-01-23T20:09:55.000Z | from ._base import BaseModel, BaseModelMeta, BaseTableModel
from ._schema import BaseField, ModelSchema, ModelSchemaMeta, Pluck, fields
from .attachment import AttachmentModel
from .table import TableModel
| 41.2 | 75 | 0.84466 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
930a3159a270d17b3e016e6419675cf9059057cd | 506 | py | Python | examples/providers/factory_aggregate/prototype.py | kinow/python-dependency-injector | ebd98bebe9a8fc0b57e68cfc12c4979833baa6a5 | [
"BSD-3-Clause"
] | null | null | null | examples/providers/factory_aggregate/prototype.py | kinow/python-dependency-injector | ebd98bebe9a8fc0b57e68cfc12c4979833baa6a5 | [
"BSD-3-Clause"
] | null | null | null | examples/providers/factory_aggregate/prototype.py | kinow/python-dependency-injector | ebd98bebe9a8fc0b57e68cfc12c4979833baa6a5 | [
"BSD-3-Clause"
] | null | null | null | """FactoryAggregate provider prototype."""
class FactoryAggregate:
"""FactoryAggregate provider prototype."""
def __init__(self, **factories):
"""Initialize instance."""
self.factories = factories
def __call__(self, factory_name, *args, **kwargs):
"""Create object."""
return self.factories[factory_name](*args, **kwargs)
def __getattr__(self, factory_name):
"""Return factory with specified name."""
return self.factories[factory_name]
| 28.111111 | 60 | 0.656126 | 460 | 0.909091 | 0 | 0 | 0 | 0 | 0 | 0 | 171 | 0.337945 |
930b6911eb3b99feb00dad0f3f43696dfc1d21e0 | 27,385 | py | Python | test_cnlunardate.py | YuBPan/cnlunardate | c19aa9821aecadea6647ac026bf233ab9f7cff90 | [
"MIT"
] | null | null | null | test_cnlunardate.py | YuBPan/cnlunardate | c19aa9821aecadea6647ac026bf233ab9f7cff90 | [
"MIT"
] | 1 | 2020-01-10T05:06:27.000Z | 2020-01-10T05:06:27.000Z | test_cnlunardate.py | YuBPan/cnlunardate | c19aa9821aecadea6647ac026bf233ab9f7cff90 | [
"MIT"
] | null | null | null | """Test cnlunardate."""
import unittest
import pickle
from cnlunardate import cnlunardate
from cnlunardate import MIN_YEAR, MAX_YEAR
from datetime import timedelta
# Exercise both the C-accelerated and the pure-Python unpickler.
pickle_loads = {pickle.loads, pickle._loads}
# One (pickler, unpickler, protocol) triple for every supported protocol.
pickle_choices = [(pickle, pickle, proto)
                  for proto in range(pickle.HIGHEST_PROTOCOL + 1)]
assert len(pickle_choices) == pickle.HIGHEST_PROTOCOL + 1
class TestCnlunardateOnly(unittest.TestCase):
    """Tests that apply specifically to the cnlunardate class itself."""

    def test_delta_non_days_ignored(self):
        """Only the ``days`` component of a timedelta affects arithmetic."""
        base = cnlunardate(2000, 1, 2)
        # Exercise both a positive and a negative one-day offset.
        for sign in (1, -1):
            offset = timedelta(days=sign)
            days_only = timedelta(offset.days)
            self.assertEqual(days_only, timedelta(sign))
            # Adding in either operand order, and subtracting, must behave
            # exactly as if only the days component were present.
            self.assertEqual(base + offset, base + days_only)
            self.assertEqual(offset + base, base + days_only)
            self.assertEqual(base - offset, base - days_only)
class SubclassDate(cnlunardate):
    """Minimal cnlunardate subclass used to verify subclass round-trips."""
    # Extra attribute distinguishing instances of the subclass.
    sub_var = 1
class TestCnlunardate(unittest.TestCase):
theclass = cnlunardate
def test_basic_attributes(self):
dt = self.theclass(2017, 6, 1, True)
self.assertEqual(dt.year, 2017)
self.assertEqual(dt.month, 6)
self.assertEqual(dt.day, 1)
self.assertEqual(dt.isLeapMonth, True)
def test_roundtrip(self):
for dt in (self.theclass(1900, 1, 1),
self.theclass.today()):
# Verify dt -> string -> cnlunardate identity.
s = repr(dt)
self.assertTrue(s.startswith("cnlunardate."))
s = s[len("cnlunardate."):]
dt2 = eval(s)
self.assertEqual(dt, dt2)
# Verify identity via reconstructing from pieces.
dt2 = self.theclass(dt.year, dt.month, dt.day)
self.assertEqual(dt, dt2)
    def test_ordinal_conversions(self):
        """Verify toordinal() and fromordinal() are mutual inverses."""
        # Check some fixed values.  Trailing comments give the equivalent
        # solar (Gregorian) date for each lunar date.
        for y, m, d, n in [(1900, 1, 1, 693626), # 1900, 1, 31
                           (1945, 10, 8, 710347), # 1945, 11, 12
                           (2100, 12, 1, 767009)]: # 2100, 12, 31
            d = self.theclass(y, m, d)
            self.assertEqual(n, d.toordinal())
            fromord = self.theclass.fromordinal(n)
            self.assertEqual(d, fromord)

        # Check first and last days of year spottily across the whole
        # range of years supported.
        for year in range(MIN_YEAR+1, MAX_YEAR+1, 7):
            # Verify (year, 1, 1) -> ordinal -> y, m, d, l is identity.
            d = self.theclass(year, 1, 1)
            n = d.toordinal()
            d2 = self.theclass.fromordinal(n)
            self.assertEqual(d, d2)
            # Verify that moving back a day gets to the end of year-1.
            if year > 1:
                d = self.theclass.fromordinal(n-1)
                # A lunar month has 29 or 30 days; try the longer month
                # first and fall back to the shorter one.
                try:
                    d2 = self.theclass(year-1, 12, 30)
                except ValueError:
                    d2 = self.theclass(year-1, 12, 29)
                self.assertEqual(d, d2)
                self.assertEqual(d2.toordinal(), n-1)

        # Test every day in a year with and without leap month.
        # ``dim`` lists the days in each of the 12 regular months;
        # 2017 has a 30-day leap 6th month, 2018 has no leap month.
        for year, dim, hasLeapMonth, leapMonth, leapMonthDays in \
            (2017, [29, 30, 29, 30, 29, 29, 29, 30, 29, 30, 30, 30], True, 6, 30), \
            (2018, [29, 30, 29, 30, 29, 29, 30, 29, 30, 29, 30, 30], False, -1, -1):
            n = self.theclass(year, 1, 1).toordinal()
            for month, maxday in zip(range(1, len(dim)+1), dim):
                for day in range(1, maxday+1):
                    d = self.theclass(year, month, day)
                    self.assertEqual(d.toordinal(), n)
                    self.assertEqual(d, self.theclass.fromordinal(n))
                    n += 1
                # The leap month follows the regular month it duplicates.
                if hasLeapMonth and month == leapMonth:
                    for day in range(1, leapMonthDays+1):
                        d = self.theclass(year, month, day, True)
                        self.assertEqual(d.toordinal(), n)
                        self.assertEqual(d, self.theclass.fromordinal(n))
                        n += 1
def test_extreme_ordinals(self):
a = self.theclass.min
a = self.theclass(a.year, a.month, a.day)
aord = a.toordinal()
b = a.fromordinal(aord)
self.assertEqual(a, b)
self.assertRaises(ValueError, lambda: a.fromordinal(aord - 1))
b = a + timedelta(days=1)
self.assertEqual(b.toordinal(), aord + 1)
self.assertEqual(b, self.theclass.fromordinal(aord + 1))
a = self.theclass.max
a = self.theclass(a.year, a.month, a.day)
aord = a.toordinal()
b = a.fromordinal(aord)
self.assertEqual(a, b)
self.assertRaises(ValueError, lambda: a.fromordinal(aord + 1))
b = a - timedelta(days=1)
self.assertEqual(b.toordinal(), aord - 1)
self.assertEqual(b, self.theclass.fromordinal(aord - 1))
def test_bad_constructor_arguments(self):
# missing arguments
self.assertRaises(TypeError, self.theclass)
self.assertRaises(TypeError, self.theclass, MIN_YEAR)
self.assertRaises(TypeError, self.theclass, MIN_YEAR, 1)
# bad years
self.theclass(MIN_YEAR, 1, 1)
self.theclass(MAX_YEAR, 1, 1)
self.assertRaises(ValueError, self.theclass, MIN_YEAR-1, 1, 1)
self.assertRaises(ValueError, self.theclass, MAX_YEAR+1, 1, 1)
# bad months
self.theclass(2017, 1, 1)
self.theclass(2017, 12, 1)
self.assertRaises(ValueError, self.theclass, 2017, 0, 1)
self.assertRaises(ValueError, self.theclass, 2017, 13, 1)
# bad days
self.theclass(2017, 1, 29)
self.theclass(2017, 6, 29)
self.theclass(2017, 6, 30, True)
self.assertRaises(ValueError, self.theclass, 2017, 1, 0)
self.assertRaises(ValueError, self.theclass, 2017, 1, 30)
self.assertRaises(ValueError, self.theclass, 2017, 6, 30)
self.assertRaises(ValueError, self.theclass, 2017, 6, 31, True)
# bad isLeapMonth
self.theclass(2017, 1, 1)
self.assertRaises(ValueError, self.theclass, 2017, 1, 1, True)
self.assertRaises(ValueError, self.theclass, 2017, 6, 30)
# min and max
self.theclass(MIN_YEAR, 1, 1)
self.theclass(MAX_YEAR, 12, 1)
self.assertRaises(ValueError, self.theclass, MIN_YEAR-1, 12, 30)
self.assertRaises(ValueError, self.theclass, MAX_YEAR, 12, 2)
def test_bad_constructor_arguments_typeerror(self):
# non-expected arguments
self.assertRaises(TypeError, self.theclass, 2017.0, 1, 1)
self.assertRaises(TypeError, self.theclass, 2017, 1.0, 1)
self.assertRaises(TypeError, self.theclass, 2017, 1, 1.0)
self.assertRaises(TypeError, self.theclass, 2017, 1, 1, "non-bool type")
# int __index__
class IntIndex:
def __init__(self, i):
self.i = i
def __index__(self):
return self.i
self.theclass(IntIndex(2017), 1, 1)
self.theclass(2017, IntIndex(1), 1)
self.theclass(2017, 1, IntIndex(1))
# non-int __index__
class NonIntIndex:
def __index__(self):
return 1.0
arg = NonIntIndex()
self.assertRaises(TypeError, self.theclass, arg, 1, 1)
self.assertRaises(TypeError, self.theclass, 1, arg, 1)
self.assertRaises(TypeError, self.theclass, 1, 1, arg)
# int __int__
class IntInt:
def __init__(self, i):
self.i = i
def __int__(self):
return self.i
self.theclass(IntInt(2017), 1, 1)
self.theclass(2017, IntInt(1), 1)
self.theclass(2017, 1, IntInt(1))
# non-int __int__
class NonIntInt:
def __int__(self):
return 1.0
arg = NonIntInt()
self.assertRaises(TypeError, self.theclass, arg, 1, 1)
self.assertRaises(TypeError, self.theclass, 1, arg, 1)
self.assertRaises(TypeError, self.theclass, 1, 1, arg)
# bool __bool__
class BoolBool:
def __bool__(self):
return True
self.theclass(2017, 6, 1, BoolBool())
# non-bool __bool__
class NonBoolBool:
def __bool__(self):
return 1.0
arg = NonBoolBool()
self.assertRaises(TypeError, self.theclass, 2017, 6, 1, arg)
def test_hash_equality(self):
d = self.theclass(2017, 1, 1)
# same thing
e = self.theclass(2017, 1, 1)
self.assertEqual(d, e)
self.assertEqual(hash(d), hash(e))
dic = {d: 1}
dic[e] = 2
self.assertEqual(len(dic), 1)
self.assertEqual(dic[d], 2)
self.assertEqual(dic[e], 2)
d = self.theclass(2017, 6, 30, True)
# same thing
e = self.theclass(2017, 6, 30, True)
self.assertEqual(d, e)
self.assertEqual(hash(d), hash(e))
dic = {d: 1}
dic[e] = 2
self.assertEqual(len(dic), 1)
self.assertEqual(dic[d], 2)
self.assertEqual(dic[e], 2)
# different on isLeapMonth
self.assertNotEqual(hash(self.theclass(2017, 6, 29)),
hash(self.theclass(2017, 6, 29, True)))
    def test_computations(self):
        """Verify cnlunardate/timedelta arithmetic and rejected operand mixes."""
        # Difference between two lunar dates is a timedelta of whole days.
        a = self.theclass(2002, 1, 30)
        b = self.theclass(1956, 1, 29)
        c = self.theclass(2001, 2, 1)
        diff = a-b
        self.assertEqual(diff.days, 16803)
        self.assertEqual(diff.seconds, 0)
        self.assertEqual(diff.microseconds, 0)

        day = timedelta(1)
        week = timedelta(7)
        # Arithmetic starting from a regular (non-leap-month) date.
        a = self.theclass(2002, 3, 2)
        self.assertEqual(a + day, self.theclass(2002, 3, 3))
        self.assertEqual(day + a, self.theclass(2002, 3, 3))
        self.assertEqual(a - day, self.theclass(2002, 3, 1))
        self.assertEqual(-day + a, self.theclass(2002, 3, 1))
        self.assertEqual(a + week, self.theclass(2002, 3, 9))
        self.assertEqual(a - week, self.theclass(2002, 2, 25))
        self.assertEqual(a + 52*week, self.theclass(2003, 3, 12))
        self.assertEqual(a - 52*week, self.theclass(2001, 3, 22))
        self.assertEqual((a + week) - a, week)
        self.assertEqual((a + day) - a, day)
        self.assertEqual((a - week) - a, -week)
        self.assertEqual((a - day) - a, -day)
        self.assertEqual(a - (a + week), -week)
        self.assertEqual(a - (a + day), -day)
        self.assertEqual(a - (a - week), week)
        self.assertEqual(a - (a - day), day)
        self.assertEqual(c - (c - day), day)

        # Same arithmetic starting from a date inside a leap month.
        a = self.theclass(2017, 6, 2, True)
        self.assertEqual(a + day, self.theclass(2017, 6, 3, True))
        self.assertEqual(day + a, self.theclass(2017, 6, 3, True))
        self.assertEqual(a - day, self.theclass(2017, 6, 1, True))
        self.assertEqual(-day + a, self.theclass(2017, 6, 1, True))
        self.assertEqual(a + week, self.theclass(2017, 6, 9, True))
        self.assertEqual(a - week, self.theclass(2017, 6, 24))
        self.assertEqual(a + 52*week, self.theclass(2018, 6, 11))
        self.assertEqual(a - 52*week, self.theclass(2016, 6, 22))
        self.assertEqual((a + week) - a, week)
        self.assertEqual((a + day) - a, day)
        self.assertEqual((a - week) - a, -week)
        self.assertEqual((a - day) - a, -day)
        self.assertEqual(a - (a + week), -week)
        self.assertEqual(a - (a + day), -day)
        self.assertEqual(a - (a - week), week)
        self.assertEqual(a - (a - day), day)
        self.assertEqual(c - (c - day), day)

        # Add/sub ints or floats should be illegal
        for i in 1, 1.0:
            self.assertRaises(TypeError, lambda: a+i)
            self.assertRaises(TypeError, lambda: a-i)
            self.assertRaises(TypeError, lambda: i+a)
            self.assertRaises(TypeError, lambda: i-a)

        # delta - cnlunardate is senseless.
        self.assertRaises(TypeError, lambda: day - a)
        # mixing cnlunardate and (delta or cnlunardate) via * or // is senseless
        self.assertRaises(TypeError, lambda: day * a)
        self.assertRaises(TypeError, lambda: a * day)
        self.assertRaises(TypeError, lambda: day // a)
        self.assertRaises(TypeError, lambda: a // day)
        self.assertRaises(TypeError, lambda: a * a)
        self.assertRaises(TypeError, lambda: a // a)
        # cnlunardate + cnlunardate is senseless
        self.assertRaises(TypeError, lambda: a + a)
def test_overflow(self):
tiny = self.theclass.resolution
for delta in [tiny, timedelta(1), timedelta(2)]:
dt = self.theclass.min + delta
dt -= delta # no problem
self.assertRaises(OverflowError, dt.__sub__, delta)
self.assertRaises(OverflowError, dt.__add__, -delta)
dt = self.theclass.max - delta
dt += delta # no problem
self.assertRaises(OverflowError, dt.__add__, delta)
self.assertRaises(OverflowError, dt.__sub__, -delta)
def test_fromtimestamp(self):
import time
# Try an arbitrary fixed value.
ts = time.mktime((1999, 9, 19, 0, 0, 0, 0, 0, -1))
d = self.theclass.fromtimestamp(ts)
self.assertEqual(d.year, 1999)
self.assertEqual(d.month, 8)
self.assertEqual(d.day, 10)
self.assertEqual(d.isLeapMonth, False)
def test_insane_fromtimestamp(self):
# It's possible that some platform maps time_t to double,
# and that this test will fail there. This test should
# exempt such platforms (provided they return reasonable
# results!).
for insane in -1e200, 1e200:
self.assertRaises(OverflowError, self.theclass.fromtimestamp,
insane)
    def test_today(self):
        """today() must agree with fromtimestamp(time.time()).

        The two clock reads happen at slightly different moments, so the
        comparison is retried a few times and finally given a half-second
        tolerance instead of failing outright.
        """
        import time
        # We claim that today() is like fromtimestamp(time.time()), so
        # prove it.
        for dummy in range(3):
            today = self.theclass.today()
            ts = time.time()
            todayagain = self.theclass.fromtimestamp(ts)
            if today == todayagain:
                break
            # There are several legit reasons that could fail:
            # 1. It recently became midnight, between the today() and the
            #    time() calls.
            # 2. The platform time() has such fine resolution that we'll
            #    never get the same value twice.
            # 3. The platform time() has poor resolution, and we just
            #    happened to call today() right before a resolution quantum
            #    boundary.
            # 4. The system clock got fiddled between calls.
            # In any case, wait a little while and try again.
            time.sleep(0.1)
        # It worked or it didn't. If it didn't, assume it's reason #2, and
        # let the test pass if they're within half a second of each other.
        if today != todayagain:
            self.assertAlmostEqual(todayagain, today,
                                   delta=timedelta(seconds=0.5))
def test_weekday(self):
for i in range(7):
# 2017, 6, 2 is a Monday
self.assertEqual(self.theclass(2017, 6, 2+i, True).weekday(), i)
self.assertEqual(self.theclass(
2017, 6, 2+i, True).isoweekday(), i+1)
# 2017, 1, 3 is a Monday
self.assertEqual(self.theclass(2017, 1, 3+i).weekday(), i)
self.assertEqual(self.theclass(2017, 1, 3+i).isoweekday(), i+1)
    def test_isocalendar(self):
        """isocalendar() returns (ISO year, ISO week, ISO weekday).

        The expected triples for these lunar dates come from the reference
        table cited below; each loop iteration walks one weekday forward.
        """
        # Check examples from
        # http://www.phys.uu.nl/~vgent/calendar/isocalendar.htm
        for i in range(7):
            d = self.theclass(2003, 11, 22+i)
            self.assertEqual(d.isocalendar(), (2003, 51, i+1))
            d = self.theclass(2003, 11, 29) + timedelta(i)
            self.assertEqual(d.isocalendar(), (2003, 52, i+1))
            d = self.theclass(2003, 12, 7+i)
            self.assertEqual(d.isocalendar(), (2004, 1, i+1))
            d = self.theclass(2003, 12, 14+i)
            self.assertEqual(d.isocalendar(), (2004, 2, i+1))
            d = self.theclass(2009, 11, 6+i)
            self.assertEqual(d.isocalendar(), (2009, 52, i+1))
            d = self.theclass(2009, 11, 13+i)
            self.assertEqual(d.isocalendar(), (2009, 53, i+1))
            d = self.theclass(2009, 11, 20+i)
            self.assertEqual(d.isocalendar(), (2010, 1, i+1))
    def test_iso_long_years(self):
        """Years whose Dec 31 falls in ISO week 53 match the reference table.

        The first half of the table lists offsets from 1600 (checked via
        MIN_YEAR, a module-level constant -- presumably 1600; the
        ``i + MIN_YEAR - 1600`` arithmetic relies on that), the second half
        offsets from 2000.
        """
        from datetime import date
        # Calculate long ISO years and compare to table from
        # http://www.phys.uu.nl/~vgent/calendar/isocalendar.htm
        ISO_LONG_YEARS_TABLE = """
              4  32  60  88
              9  37  65  93
             15  43  71  99
             20  48  76
             26  54  82
            303 331 359 387
            308 336 364 392
            314 342 370 398
            320 348 376
            325 353 381
        """
        iso_long_years = sorted(map(int, ISO_LONG_YEARS_TABLE.split()))
        L = []
        for i in range(101):
            d = self.theclass.fromsolardate(date(MIN_YEAR+i, 12, 31))
            if d.isocalendar()[1] == 53:
                L.append(i + MIN_YEAR - 1600)
            d = self.theclass.fromsolardate(date(2000+i, 12, 31))
            if d.isocalendar()[1] == 53:
                L.append(i)
        self.assertEqual(sorted(L), iso_long_years)
def test_resolution_info(self):
self.assertIsInstance(self.theclass.min, cnlunardate)
self.assertIsInstance(self.theclass.max, cnlunardate)
self.assertIsInstance(self.theclass.resolution, timedelta)
self.assertTrue(self.theclass.max > self.theclass.min)
def test_extreme_timedelta(self):
big = self.theclass.max - self.theclass.min
n = (big.days*24*3600 + big.seconds)*1000000 + big.microseconds
justasbig = timedelta(0, 0, n)
self.assertEqual(big, justasbig)
self.assertEqual(self.theclass.min + big, self.theclass.max)
self.assertEqual(self.theclass.max - big, self.theclass.min)
def test_from_to_solardate(self):
from datetime import date
for y1, m1, d1, y2, m2, d2 in [ (1900, 1, 1, 1900, 1, 31),
(1945, 10, 8, 1945, 11, 12),
(2100, 12, 1, 2100, 12, 31)]:
d = self.theclass(y1, m1, d1)
solar = date(y2, m2, d2)
fromsolar = self.theclass.fromsolardate(solar)
self.assertEqual(d, fromsolar)
tosolar = self.theclass.tosolardate(d)
self.assertEqual(solar, tosolar)
    def test_timetuple(self):
        """timetuple() yields the *solar* struct_time for a lunar date.

        Each expected tuple is (year, month, day, hour, min, sec, weekday,
        yearday, isdst); hours/minutes/seconds are always 0 and isdst -1.
        """
        for i in range(7):
            # January 2, 1956 is a Monday (0)
            d = self.theclass(1955, 11, 20+i)
            t = d.timetuple()
            self.assertEqual(t, (1956, 1, 2+i, 0, 0, 0, i, 2+i, -1))
            # February 1, 1956 is a Wednesday (2)
            d = self.theclass(1955, 12, 20+i)
            t = d.timetuple()
            self.assertEqual(t, (1956, 2, 1+i, 0, 0, 0, (2+i) % 7, 32+i, -1))
            # March 1, 1956 is a Thursday (3), and is the 31+29+1 = 61st day
            # of the year.
            d = self.theclass(1956, 1, 19+i)
            t = d.timetuple()
            self.assertEqual(t, (1956, 3, 1+i, 0, 0, 0, (3+i) % 7, 61+i, -1))
            self.assertEqual(t.tm_year, 1956)
            self.assertEqual(t.tm_mon, 3)
            self.assertEqual(t.tm_mday, 1+i)
            self.assertEqual(t.tm_hour, 0)
            self.assertEqual(t.tm_min, 0)
            self.assertEqual(t.tm_sec, 0)
            self.assertEqual(t.tm_wday, (3+i) % 7)
            self.assertEqual(t.tm_yday, 61+i)
            self.assertEqual(t.tm_isdst, -1)
def test_pickling(self):
args = 2015, 11, 27
orig = self.theclass(*args)
for pickler, unpickler, proto in pickle_choices:
green = pickler.dumps(orig, proto)
derived = unpickler.loads(green)
self.assertEqual(orig, derived)
self.assertEqual(orig.__reduce__(), orig.__reduce_ex__(2))
    def test_compat_unpickle(self):
        """Byte streams pickled by older protocols still unpickle correctly.

        The three payloads encode the same 2015-11-27 date under pickle
        protocols 0, 1 and 2 respectively.
        """
        tests = [
            b"ccnlunardate\ncnlunardate\n(S'\\x07\\xdf\\x0b\\x1b\\x00'\ntR.",
            b"ccnlunardate\ncnlunardate\n(U\x05\x07\xdf\x0b\x1b\x00tR.",
            b"\x80\x02ccnlunardate\ncnlunardate\nU\x05\x07\xdf\x0b\x1b\x00\x85R.",
        ]
        args = 2015, 11, 27
        expected = self.theclass(*args)
        for data in tests:
            for loads in pickle_loads:
                # latin1 keeps byte values intact when decoding protocol-0 strings.
                derived = loads(data, encoding="latin1")
                self.assertEqual(derived, expected)
    def test_compare(self):
        """All six comparison operators behave consistently.

        Covers: equal dates; strictly larger dates differing in year, leap
        flag, month or day; and equality/ordering against unrelated types
        (equality is allowed and False, ordering raises TypeError).
        """
        t1 = self.theclass(2017, 6, 4)
        t2 = self.theclass(2017, 6, 4)
        self.assertEqual(t1, t2)
        self.assertTrue(t1 <= t2)
        self.assertTrue(t1 >= t2)
        self.assertFalse(t1 != t2)
        self.assertFalse(t1 < t2)
        self.assertFalse(t1 > t2)
        for args in (2018, 6, 3), (2017, 6, 4, True), (2017, 7, 4), (2017, 6, 5):
            t2 = self.theclass(*args) # this is larger than t1
            self.assertTrue(t1 < t2)
            self.assertTrue(t2 > t1)
            self.assertTrue(t1 <= t2)
            self.assertTrue(t2 >= t1)
            self.assertTrue(t1 != t2)
            self.assertTrue(t2 != t1)
            self.assertFalse(t1 == t2)
            self.assertFalse(t2 == t1)
            self.assertFalse(t1 > t2)
            self.assertFalse(t2 < t1)
            self.assertFalse(t1 >= t2)
            self.assertFalse(t2 <= t1)
        for badarg in (10, 34.5, "abc", {}, [], ()):
            self.assertEqual(t1 == badarg, False)
            self.assertEqual(t1 != badarg, True)
            self.assertEqual(badarg == t1, False)
            self.assertEqual(badarg != t1, True)
        # badarg is the last loop value, (), at this point.
        self.assertRaises(TypeError, lambda: t1 < badarg)
        self.assertRaises(TypeError, lambda: t1 > badarg)
        self.assertRaises(TypeError, lambda: t1 >= badarg)
        self.assertRaises(TypeError, lambda: badarg <= t1)
        self.assertRaises(TypeError, lambda: badarg < t1)
        self.assertRaises(TypeError, lambda: badarg > t1)
        self.assertRaises(TypeError, lambda: badarg >= t1)
def test_mixed_compare(self):
our = self.theclass(2000, 4, 5)
# Our class can be compared for equality to other classes
self.assertEqual(our == 1, False)
self.assertEqual(1 == our, False)
self.assertEqual(our != 1, True)
self.assertEqual(1 != our, True)
# But the ordering is undefined
self.assertRaises(TypeError, lambda: our < 1)
self.assertRaises(TypeError, lambda: 1 < our)
# Repeat those tests with a different class
class SomeClass:
pass
their = SomeClass()
self.assertEqual(our == their, False)
self.assertEqual(their == our, False)
self.assertEqual(our != their, True)
self.assertEqual(their != our, True)
self.assertRaises(TypeError, lambda: our < their)
self.assertRaises(TypeError, lambda: their < our)
def test_bool(self):
# All cnlunardates are considered true.
self.assertTrue(self.theclass.min)
self.assertTrue(self.theclass.max)
def test_replace(self):
cls = self.theclass
args = [2017, 6, 5, False]
base = cls(*args)
self.assertEqual(base, base.replace())
i = 0
for name, newval in (("year", 2018),
("month", 7),
("day", 6),
("isLeapMonth", True)):
newargs = args[:]
newargs[i] = newval
expected = cls(*newargs)
got = base.replace(**{name: newval})
self.assertEqual(expected, got)
i += 1
base = cls(2016, 1, 30)
# Day is out of bounds.
self.assertRaises(ValueError, base.replace, year=2017)
# IsLeapMonth is wrong.
self.assertRaises(ValueError, base.replace, isLeapMonth=True)
def test_subclass_replace(self):
class DateSubclass(self.theclass):
pass
dt = DateSubclass(2012, 1, 1)
self.assertIs(type(dt.replace(year=2013)), DateSubclass)
    def test_subclass_cnlunardate(self):
        """Subclasses with a custom __new__ and extra state work correctly.

        Note that C.__new__ closes over the *test method's* ``self`` to
        reach self.theclass; ``cls`` is the subclass being constructed.
        """
        class C(self.theclass):
            theAnswer = 42
            def __new__(cls, *args, **kws):
                temp = kws.copy()
                # Strip the subclass-only kwarg before delegating upward.
                extra = temp.pop("extra")
                result = self.theclass.__new__(cls, *args, **temp)
                result.extra = extra
                return result
            def newmeth(self, start):
                return start + self.year + self.month
        args = 2003, 4, 14
        dt1 = self.theclass(*args)
        dt2 = C(*args, **{"extra": 7})
        self.assertEqual(dt2.__class__, C)
        self.assertEqual(dt2.theAnswer, 42)
        self.assertEqual(dt2.extra, 7)
        self.assertEqual(dt1.toordinal(), dt2.toordinal())
        self.assertEqual(dt2.newmeth(-7), dt1.year + dt1.month - 7)
    def test_subclass_alternate_constructors(self):
        """Alternate constructors must route through the subclass __new__.

        fromsolardate/fromordinal/fromtimestamp are invoked both on the
        subclass itself and on an existing instance; every result must be a
        DateSubclass carrying the marker attribute set by __new__.
        """
        from datetime import datetime, date, time
        # Test that alternate constructors call the constructor
        class DateSubclass(self.theclass):
            def __new__(cls, *args, **kwargs):
                result = self.theclass.__new__(cls, *args, **kwargs)
                result.extra = 7
                return result
        args = (2003, 3, 13)
        d_date = date(2003, 4, 14)  # Equivalent solar date
        d_ord = 731319  # Equivalent ordinal
        base_d = DateSubclass(*args)
        self.assertIsInstance(base_d, DateSubclass)
        self.assertEqual(base_d.extra, 7)
        # Timestamp depends on time zone, so we'll calculate the equivalent here
        ts = datetime.combine(d_date, time(0)).timestamp()
        test_cases = [
            ("fromsolardate", (d_date,)),
            ("fromordinal", (d_ord,)),
            ("fromtimestamp", (ts,)),
        ]
        for constr_name, constr_args in test_cases:
            for base_obj in (DateSubclass, base_d):
                # Test both the classmethod and method
                with self.subTest(base_obj_type=type(base_obj),
                                  constr_name=constr_name):
                    constr = getattr(base_obj, constr_name)
                    dt = constr(*constr_args)
                    # Test that it creates the right subclass
                    self.assertIsInstance(dt, DateSubclass)
                    # Test that it's equal to the base object
                    self.assertEqual(dt, base_d)
                    # Test that it called the constructor
                    self.assertEqual(dt.extra, 7)
def test_pickling_subclass_date(self):
args = 2006, 7, 23
orig = SubclassDate(*args)
for pickler, unpickler, proto in pickle_choices:
green = pickler.dumps(orig, proto)
derived = unpickler.loads(green)
self.assertEqual(orig, derived)
| 38.843972 | 88 | 0.560964 | 26,996 | 0.985795 | 0 | 0 | 0 | 0 | 0 | 0 | 3,373 | 0.12317 |
930cba7bd9d0b2f6492d68c76893da92e46a129b | 756 | py | Python | sloc_report/sloc_time.py | depop/sloc_report | 0e65ece3f5c95e6ab38313b38c110fea275326b2 | [
"MIT"
] | 1 | 2016-11-08T13:57:42.000Z | 2016-11-08T13:57:42.000Z | sloc_report/sloc_time.py | depop/sloc_report | 0e65ece3f5c95e6ab38313b38c110fea275326b2 | [
"MIT"
] | null | null | null | sloc_report/sloc_time.py | depop/sloc_report | 0e65ece3f5c95e6ab38313b38c110fea275326b2 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import time
def time_now():
    """Return the current Unix timestamp, truncated to a whole second."""
    now = time.time()
    return int(now)
def get_day_times(num_days=1, end_time=None):
    """Return (start, end) Unix-time pairs for the last ``num_days`` days.

    The list is ordered oldest-first; each tuple spans exactly 24 hours and
    consecutive tuples are contiguous, counting back from ``end_time``.

    :param num_days: Number of day windows to return
    :param end_time: Unix time to count back from. Default: now.
        (The previous signature used ``end_time=time_now()``, which Python
        evaluates once at import time, so every later call silently reused
        a stale timestamp. Resolving ``None`` at call time fixes that.)
    """
    if end_time is None:
        end_time = time_now()
    day_times = []
    for _ in range(num_days):
        start_time = end_time - (24 * 3600)
        day_times.insert(0, (start_time, end_time))
        end_time = start_time
    return day_times
| 30.24 | 78 | 0.650794 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 421 | 0.556878 |
930f2ed4f673b85667c94e0ca3470af435e95eb6 | 4,593 | py | Python | tests/home_platform/test_env.py | LuCeHe/home-platform | 06f9370bfacecebd0c8623a3b8f0511532a9a1f0 | [
"BSD-3-Clause"
] | 1 | 2021-07-27T09:15:43.000Z | 2021-07-27T09:15:43.000Z | tests/home_platform/test_env.py | LuCeHe/home-platform | 06f9370bfacecebd0c8623a3b8f0511532a9a1f0 | [
"BSD-3-Clause"
] | null | null | null | tests/home_platform/test_env.py | LuCeHe/home-platform | 06f9370bfacecebd0c8623a3b8f0511532a9a1f0 | [
"BSD-3-Clause"
] | 1 | 2021-07-27T09:21:12.000Z | 2021-07-27T09:21:12.000Z | # Copyright (c) 2017, IGLU consortium
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# - Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import os
import sys
import time
import multiprocessing
import logging
import numpy as np
import unittest
import matplotlib.pyplot as plt
from home_platform.env import BasicEnvironment
from panda3d.core import LVector3f
TEST_DATA_DIR = os.path.join(os.path.dirname(
os.path.realpath(__file__)), "..", "data")
TEST_SUNCG_DATA_DIR = os.path.join(os.path.dirname(
os.path.realpath(__file__)), "..", "data", "suncg")
class TestBasicEnvironment(unittest.TestCase):
    """Smoke tests for BasicEnvironment rendering and simulation.

    NOTE(review): these tests assume the SUNCG sample house
    0004d52d1aeeb8ae6de39d6bd993e992 is available under TEST_SUNCG_DATA_DIR,
    and they briefly open matplotlib windows for visual inspection.
    """

    def testRender(self):
        """Render one RGB + depth frame and display it for ~1 second."""
        env = BasicEnvironment("0004d52d1aeeb8ae6de39d6bd993e992",
                               suncgDatasetRoot=TEST_SUNCG_DATA_DIR, depth=True)
        env.agent.setPos(LVector3f(42, -39, 1))
        env.agent.setHpr(LVector3f(60.0, 0.0, 0.0))
        env.step()
        image = env.renderWorld.getRgbImages()['agent-0']
        depth = env.renderWorld.getDepthImages(mode='distance')['agent-0']
        fig = plt.figure(figsize=(16, 8))
        plt.axis("off")
        ax = plt.subplot(121)
        ax.imshow(image)
        ax = plt.subplot(122)
        # Normalize depth into [0, 1] for display.
        ax.imshow(depth / np.max(depth), cmap='binary')
        plt.show(block=False)
        time.sleep(1.0)
        plt.close(fig)
        env.destroy()

    def testGenerateSpawnPositions(self):
        """Sample 10 spawn positions and plot them over the occupancy map."""
        env = BasicEnvironment("0004d52d1aeeb8ae6de39d6bd993e992",
                               suncgDatasetRoot=TEST_SUNCG_DATA_DIR, depth=False)
        occupancyMap, occupancyMapCoord, positions = env.generateSpawnPositions(
            n=10)
        # World-coordinate bounds of the occupancy grid, used as plot extents.
        xmin, ymin = np.min(occupancyMapCoord, axis=(0, 1))
        xmax, ymax = np.max(occupancyMapCoord, axis=(0, 1))
        fig = plt.figure()
        plt.axis("on")
        ax = plt.subplot(111)
        ax.imshow(occupancyMap, cmap='gray', extent=[xmin, xmax, ymin, ymax])
        ax.scatter(positions[:, 0], positions[:, 1], s=40, c=[1.0, 0.0, 0.0])
        plt.show(block=False)
        time.sleep(1.0)
        plt.close(fig)
        env.destroy()

    def testMultiprocessing(self):
        """Run two independent simulations in separate worker processes."""
        # Spawn new process with independent simulations using the
        # multiprocessing module
        # Not supported in OSX for now
        if sys.platform == 'darwin':
            return
        nbProcesses = 2
        nbSteps = 100

        def worker():
            # Each process builds and steps its own environment instance.
            env = BasicEnvironment("0004d52d1aeeb8ae6de39d6bd993e992", suncgDatasetRoot=TEST_SUNCG_DATA_DIR,
                                   depth=False, debug=True)
            env.agent.setPos(LVector3f(45, -42, 1))
            env.agent.setHpr(LVector3f(45.0, 0.0, 0.0))
            # Simulation loop
            for _ in range(nbSteps):
                env.step()
                _ = env.getObservation()
            env.destroy()
        processes = []
        for _ in range(nbProcesses):
            p = multiprocessing.Process(target=worker)
            processes.append(p)
            p.start()
        for p in processes:
            p.join()
if __name__ == '__main__':
    # Keep test output quiet: only warnings and above are logged.
    logging.basicConfig(level=logging.WARN)
    # Turn numpy floating-point warnings into exceptions so tests fail loudly.
    np.seterr(all='raise')
    unittest.main()
| 34.276119 | 108 | 0.660353 | 2,499 | 0.544089 | 0 | 0 | 0 | 0 | 0 | 0 | 1,855 | 0.403875 |
931108d420be8aaf39aff7a40c0bd93abe403cb2 | 9,244 | py | Python | slack_sdk/oauth/installation_store/sqlite3/__init__.py | timgates42/python-slack-sdk | 6339fbe81031c9aec3f95927ac03706fd31f3544 | [
"MIT"
] | null | null | null | slack_sdk/oauth/installation_store/sqlite3/__init__.py | timgates42/python-slack-sdk | 6339fbe81031c9aec3f95927ac03706fd31f3544 | [
"MIT"
] | null | null | null | slack_sdk/oauth/installation_store/sqlite3/__init__.py | timgates42/python-slack-sdk | 6339fbe81031c9aec3f95927ac03706fd31f3544 | [
"MIT"
] | null | null | null | import logging
import sqlite3
from logging import Logger
from sqlite3 import Connection
from typing import Optional
from slack_sdk.oauth.installation_store.async_installation_store import (
AsyncInstallationStore,
)
from slack_sdk.oauth.installation_store.installation_store import InstallationStore
from slack_sdk.oauth.installation_store.models.bot import Bot
from slack_sdk.oauth.installation_store.models.installation import Installation
class SQLite3InstallationStore(InstallationStore, AsyncInstallationStore):
    """SQLite-backed store for Slack app installation data.

    Rows are kept in two append-only tables: ``slack_installations`` (one
    row per workspace/user installation, including user tokens and
    incoming-webhook details) and ``slack_bots`` (bot credentials only, for
    fast bot-token lookup). The schema is created lazily on first use. The
    async interface methods delegate to the synchronous implementations,
    since the underlying sqlite3 calls are blocking anyway.
    """

    def __init__(
        self,
        *,
        database: str,
        client_id: str,
        logger: Logger = logging.getLogger(__name__),
    ):
        """
        :param database: path of the SQLite database file.
        :param client_id: this Slack app's client_id; stored with every row
            so multiple apps can share one database file.
        :param logger: logger to use. (The default is evaluated once at
            import time, which is harmless here because logging.getLogger
            returns a process-wide singleton for a given name.)
        """
        self.database = database
        self.client_id = client_id
        # Becomes True once init() has verified/created the tables.
        self.init_called = False
        self._logger = logger

    @property
    def logger(self) -> Logger:
        # Fall back lazily to a module-level logger if None was injected.
        if self._logger is None:
            self._logger = logging.getLogger(__name__)
        return self._logger

    def init(self):
        """Ensure the backing tables exist, creating them if needed."""
        try:
            with sqlite3.connect(database=self.database) as conn:
                cur = conn.execute("select count(1) from slack_installations;")
                row_num = cur.fetchone()[0]
                self.logger.debug(
                    f"{row_num} installations are stored in {self.database}"
                )
        except Exception:  # skipcq: PYL-W0703
            # The probe query failed, so the schema presumably does not
            # exist yet -- create it.
            self.create_tables()
        self.init_called = True

    def connect(self) -> Connection:
        """Return a new connection, initializing the schema on first call."""
        if not self.init_called:
            self.init()
        return sqlite3.connect(database=self.database)

    def create_tables(self):
        """Create the slack_installations/slack_bots tables and indices."""
        with sqlite3.connect(database=self.database) as conn:
            conn.execute(
                """
            create table slack_installations (
                id integer primary key autoincrement,
                client_id text not null,
                app_id text not null,
                enterprise_id text not null default '',
                team_id text not null default '',
                bot_token text not null,
                bot_id text not null,
                bot_user_id text not null,
                bot_scopes text,
                user_id text not null,
                user_token text,
                user_scopes text,
                incoming_webhook_url text,
                incoming_webhook_channel_id text,
                incoming_webhook_configuration_url text,
                installed_at datetime not null default current_timestamp
            );
            """
            )
            conn.execute(
                """
            create index slack_installations_idx on slack_installations (
                client_id,
                enterprise_id,
                team_id,
                user_id,
                installed_at
            );
            """
            )
            conn.execute(
                """
            create table slack_bots (
                id integer primary key autoincrement,
                client_id text not null,
                app_id text not null,
                enterprise_id text not null default '',
                team_id text not null default '',
                bot_token text not null,
                bot_id text not null,
                bot_user_id text not null,
                bot_scopes text,
                installed_at datetime not null default current_timestamp
            );
            """
            )
            conn.execute(
                """
            create index slack_bots_idx on slack_bots (
                client_id,
                enterprise_id,
                team_id,
                installed_at
            );
            """
            )
            self.logger.debug(f"Tables have been created (database: {self.database})")
            conn.commit()

    async def async_save(self, installation: Installation):
        """Async wrapper; delegates to the blocking save()."""
        return self.save(installation)

    def save(self, installation: Installation):
        """Persist an installation as new rows in both tables.

        Rows are append-only; lookups pick the newest row by installed_at.
        Scope sequences are flattened to comma-separated strings for
        storage.
        """
        with self.connect() as conn:
            conn.execute(
                """
            insert into slack_bots (
                client_id,
                app_id,
                enterprise_id,
                team_id,
                bot_token,
                bot_id,
                bot_user_id,
                bot_scopes
            )
            values
            (
                ?,
                ?,
                ?,
                ?,
                ?,
                ?,
                ?,
                ?
            );
            """,
                [
                    self.client_id,
                    installation.app_id,
                    installation.enterprise_id or "",
                    installation.team_id or "",
                    installation.bot_token,
                    installation.bot_id,
                    installation.bot_user_id,
                    ",".join(installation.bot_scopes),
                ],
            )
            conn.execute(
                """
            insert into slack_installations (
                client_id,
                app_id,
                enterprise_id,
                team_id,
                bot_token,
                bot_id,
                bot_user_id,
                bot_scopes,
                user_id,
                user_token,
                user_scopes,
                incoming_webhook_url,
                incoming_webhook_channel_id,
                incoming_webhook_configuration_url
            )
            values
            (
                ?,
                ?,
                ?,
                ?,
                ?,
                ?,
                ?,
                ?,
                ?,
                ?,
                ?,
                ?,
                ?,
                ?
            );
            """,
                [
                    self.client_id,
                    installation.app_id,
                    installation.enterprise_id or "",
                    installation.team_id or "",
                    installation.bot_token,
                    installation.bot_id,
                    installation.bot_user_id,
                    ",".join(installation.bot_scopes),
                    installation.user_id,
                    installation.user_token,
                    ",".join(installation.user_scopes)
                    if installation.user_scopes
                    else None,
                    installation.incoming_webhook_url,
                    installation.incoming_webhook_channel_id,
                    installation.incoming_webhook_configuration_url,
                ],
            )
            # Fixed: the log message previously contained a stray ')'
            # after "slack_installations".
            self.logger.debug(
                f"New rows in slack_bots and slack_installations have been created (database: {self.database})"
            )
            conn.commit()

    async def async_find_bot(
        self, *, enterprise_id: Optional[str], team_id: Optional[str],
    ) -> Optional[Bot]:
        """Async wrapper; delegates to the blocking find_bot()."""
        return self.find_bot(enterprise_id=enterprise_id, team_id=team_id)

    def find_bot(
        self, *, enterprise_id: Optional[str], team_id: Optional[str],
    ) -> Optional[Bot]:
        """Return the most recently installed bot for a workspace.

        Returns None when no row matches or when the query fails; a failure
        is logged as a warning rather than raised.
        """
        # Not yet implemented: org-apps support
        try:
            with self.connect() as conn:
                cur = conn.execute(
                    """
                select
                    app_id,
                    enterprise_id,
                    team_id,
                    bot_token,
                    bot_id,
                    bot_user_id,
                    bot_scopes,
                    installed_at
                from
                    slack_bots
                where
                    client_id = ?
                    and
                    enterprise_id = ?
                    and
                    team_id = ?
                order by installed_at desc
                limit 1
                """,
                    [self.client_id, enterprise_id or "", team_id or ""],
                )
                row = cur.fetchone()
                result = "found" if row and len(row) > 0 else "not found"
                self.logger.debug(
                    f"find_bot's query result: {result} (database: {self.database})"
                )
                if row and len(row) > 0:
                    bot = Bot(
                        app_id=row[0],
                        enterprise_id=row[1],
                        team_id=row[2],
                        bot_token=row[3],
                        bot_id=row[4],
                        bot_user_id=row[5],
                        # Stored as a comma-joined string; Bot accepts it as-is.
                        bot_scopes=row[6],
                        installed_at=row[7],
                    )
                    return bot
                return None
        except Exception as e:  # skipcq: PYL-W0703
            message = f"Failed to find bot installation data for enterprise: {enterprise_id}, team: {team_id}: {e}"
            self.logger.warning(message)
            return None
| 33.860806 | 115 | 0.442449 | 8,794 | 0.95132 | 0 | 0 | 157 | 0.016984 | 289 | 0.031264 | 4,511 | 0.487992 |
93131be5c2c31240f968a45b8920858ae167fb1c | 381 | py | Python | address/admin.py | City-of-Helsinki/geo-search | d200e06223938e456d13ae9ab94e9dad149216d0 | [
"MIT"
] | null | null | null | address/admin.py | City-of-Helsinki/geo-search | d200e06223938e456d13ae9ab94e9dad149216d0 | [
"MIT"
] | 11 | 2021-09-13T11:46:17.000Z | 2022-03-11T08:40:02.000Z | address/admin.py | City-of-Helsinki/geo-search | d200e06223938e456d13ae9ab94e9dad149216d0 | [
"MIT"
] | 1 | 2021-10-04T09:15:44.000Z | 2021-10-04T09:15:44.000Z | from django.contrib import admin
from parler.admin import TranslatableAdmin
from .models import Address, Municipality, Street
@admin.register(Municipality)
class MunicipalityAdmin(TranslatableAdmin):
    """Admin for Municipality; TranslatableAdmin exposes translated fields."""
    pass
@admin.register(Street)
class StreetAdmin(TranslatableAdmin):
    """Admin for Street; TranslatableAdmin exposes translated fields."""
    pass
@admin.register(Address)
class AddressAdmin(admin.ModelAdmin):
    """Admin for Address.

    The street FK is edited via a raw-ID widget to avoid rendering a huge
    select box for large street tables.
    """
    raw_id_fields = ["street"]
| 19.05 | 49 | 0.792651 | 166 | 0.435696 | 0 | 0 | 245 | 0.643045 | 0 | 0 | 8 | 0.020997 |
93146c79f274fc09fa5d075dfc9d826dd48bbd57 | 7,704 | py | Python | src/pynwb/core.py | q0j0p/pynwb | 6c3a864662f2cce63b542f9f614cc632903f9bc1 | [
"BSD-3-Clause-LBNL"
] | null | null | null | src/pynwb/core.py | q0j0p/pynwb | 6c3a864662f2cce63b542f9f614cc632903f9bc1 | [
"BSD-3-Clause-LBNL"
] | 1 | 2021-06-01T22:25:36.000Z | 2021-06-01T22:25:36.000Z | src/pynwb/core.py | q0j0p/pynwb | 6c3a864662f2cce63b542f9f614cc632903f9bc1 | [
"BSD-3-Clause-LBNL"
] | null | null | null | from collections import Iterable
from h5py import RegionReference
from .form.utils import docval, getargs, ExtenderMeta, call_docval_func, popargs
from .form import Container, Data, DataRegion, get_region_slicer
from . import CORE_NAMESPACE, register_class
from six import with_metaclass
def set_parents(container, parent):
if isinstance(container, list):
for c in container:
if c.parent is None:
c.parent = parent
ret = container
else:
ret = [container]
if container.parent is None:
container.parent = parent
return ret
class NWBBaseType(with_metaclass(ExtenderMeta)):
'''The base class to any NWB types.
The purpose of this class is to provide a mechanism for representing hierarchical
relationships in neurodata.
'''
__nwbfields__ = tuple()
@docval({'name': 'name', 'type': str, 'doc': 'the name of this container'},
{'name': 'parent', 'type': 'NWBContainer',
'doc': 'the parent Container for this Container', 'default': None},
{'name': 'container_source', 'type': object,
'doc': 'the source of this Container e.g. file name', 'default': None})
def __init__(self, **kwargs):
parent, container_source = getargs('parent', 'container_source', kwargs)
super(NWBBaseType, self).__init__()
self.__fields = dict()
self.__parent = None
self.__name = getargs('name', kwargs)
if parent:
self.parent = parent
self.__container_source = container_source
@property
def name(self):
return self.__name
@property
def container_source(self):
'''The source of this Container e.g. file name or table
'''
return self.__container_source
@property
def fields(self):
return self.__fields
@property
def parent(self):
'''The parent NWBContainer of this NWBContainer
'''
return self.__parent
@parent.setter
def parent(self, parent_container):
if self.__parent is not None:
raise Exception('cannot reassign parent')
self.__parent = parent_container
@staticmethod
def __getter(nwbfield):
def _func(self):
return self.fields.get(nwbfield)
return _func
@staticmethod
def __setter(nwbfield):
def _func(self, val):
if nwbfield in self.fields:
msg = "can't set attribute '%s' -- already set" % nwbfield
raise AttributeError(msg)
self.fields[nwbfield] = val
return _func
@ExtenderMeta.pre_init
def __gather_nwbfields(cls, name, bases, classdict):
'''
This classmethod will be called during class declaration in the metaclass to automatically
create setters and getters for NWB fields that need to be exported
'''
if not isinstance(cls.__nwbfields__, tuple):
raise TypeError("'__nwbfields__' must be of type tuple")
if len(bases) and 'NWBContainer' in globals() and issubclass(bases[-1], NWBContainer) \
and bases[-1].__nwbfields__ is not cls.__nwbfields__:
new_nwbfields = list(cls.__nwbfields__)
new_nwbfields[0:0] = bases[-1].__nwbfields__
cls.__nwbfields__ = tuple(new_nwbfields)
for f in cls.__nwbfields__:
if not hasattr(cls, f):
setattr(cls, f, property(cls.__getter(f), cls.__setter(f)))
@register_class('NWBContainer', CORE_NAMESPACE)
class NWBContainer(NWBBaseType, Container):
__nwbfields__ = ('source',
'help')
@docval({'name': 'source', 'type': str, 'doc': 'a description of where this NWBContainer came from'},
{'name': 'name', 'type': str, 'doc': 'the name of this container'},
{'name': 'parent', 'type': 'NWBContainer',
'doc': 'the parent Container for this Container', 'default': None},
{'name': 'container_source', 'type': object,
'doc': 'the source of this Container e.g. file name', 'default': None})
def __init__(self, **kwargs):
call_docval_func(super(NWBContainer, self).__init__, kwargs)
self.source = getargs('source', kwargs)
@register_class('NWBData', CORE_NAMESPACE)
class NWBData(NWBBaseType, Data):
__nwbfields__ = ('help',)
@docval({'name': 'name', 'type': str, 'doc': 'the name of this container'},
{'name': 'data', 'type': (Iterable, Data), 'doc': 'the source of the data'},
{'name': 'parent', 'type': 'NWBContainer',
'doc': 'the parent Container for this Container', 'default': None},
{'name': 'container_source', 'type': object,
'doc': 'the source of this Container e.g. file name', 'default': None})
def __init__(self, **kwargs):
call_docval_func(super(NWBData, self).__init__, kwargs)
self.__data = getargs('data', kwargs)
@property
def data(self):
return self.__data
class NWBTable(NWBData):
@docval({'name': 'columns', 'type': (list, tuple), 'doc': 'a list of the columns in this table'},
{'name': 'name', 'type': str, 'doc': 'the name of this container'},
{'name': 'data', 'type': Iterable, 'doc': 'the source of the data', 'default': list()},
{'name': 'parent', 'type': 'NWBContainer',
'doc': 'the parent Container for this Container', 'default': None},
{'name': 'container_source', 'type': object,
'doc': 'the source of this Container e.g. file name', 'default': None})
def __init__(self, **kwargs):
self.__columns = tuple(popargs('columns', kwargs))
call_docval_func(super(NWBTable, self).__init__, kwargs)
@property
def columns(self):
return self.__columns
@docval({'name': 'values', 'type': dict, 'doc': 'the values for each column'})
def add_row(self, **kwargs):
values = getargs('values', kwargs)
if not isinstance(self.data, list):
msg = 'Cannot append row to %s' % type(self.data)
raise ValueError(msg)
self.data.append(tuple(values[col] for col in self.columns))
@docval({'name': 'kwargs', 'type': dict, 'doc': 'the column to query by'})
def query(self, **kwargs):
'''
Query a table
'''
raise NotImplementedError('query')
def __len__(self):
return len(self.data)
def __getitem__(self, idx):
return self.data[idx]
# diamond inheritence
class NWBTableRegion(NWBData, DataRegion):
'''
A class for representing regions i.e. slices or indices into an NWBTable
'''
@docval({'name': 'name', 'type': str, 'doc': 'the name of this container'},
{'name': 'table', 'type': NWBTable, 'doc': 'the ElectrodeTable this region applies to'},
{'name': 'region', 'type': (slice, list, tuple, RegionReference), 'doc': 'the indices of the table'})
def __init__(self, **kwargs):
table, region = getargs('table', 'region', kwargs)
self.__table = table
self.__region = region
name = getargs('name', kwargs)
super(NWBTableRegion, self).__init__(name, table)
self.__regionslicer = get_region_slicer(self.__table.data, self.__region)
@property
def table(self):
'''The ElectrodeTable this region applies to'''
return self.__table
@property
def region(self):
'''The indices into table'''
return self.__region
def __len__(self):
return len(self.__regionslicer)
def __getitem__(self, idx):
return self.__regionslicer[idx]
| 35.666667 | 113 | 0.610462 | 6,967 | 0.904335 | 0 | 0 | 6,305 | 0.818406 | 0 | 0 | 2,405 | 0.312175 |
9314b87cfdabe5d7ea05f8832e4b689d86e607b3 | 2,246 | py | Python | tests/test_get_google_streetview.py | AQ-AI/open-geo-engine | a9382083d5a51ffef4af0a76df7a6e15d64cbaf5 | [
"BSD-3-Clause"
] | null | null | null | tests/test_get_google_streetview.py | AQ-AI/open-geo-engine | a9382083d5a51ffef4af0a76df7a6e15d64cbaf5 | [
"BSD-3-Clause"
] | 19 | 2022-01-15T19:13:26.000Z | 2022-03-20T22:30:00.000Z | tests/test_get_google_streetview.py | AQ-AI/open-geo-engine | a9382083d5a51ffef4af0a76df7a6e15d64cbaf5 | [
"BSD-3-Clause"
] | 2 | 2022-01-18T11:34:28.000Z | 2022-03-01T15:54:45.000Z | import os
import pandas as pd
from open_geo_engine.src.get_google_streetview import GetGoogleStreetView
def test_get_google_streetview():
size = "600x300"
heading = "151.78"
pitch = "-0.76"
key = os.environ.get("GOOGLE_DEV_API_KEY")
image_folder = "tests/test_data"
links_file = "tests/test_data/streetview_links.txt"
metadata_file = "tests/test_data/test_metadata.json"
place = "Parque_El_Retiro_Madrid"
meta_base = "https://maps.googleapis.com/maps/api/streetview/metadata?"
satellite_data_df = pd.DataFrame(
{
"longitude": [-3.683317243711068, -3.683317243711068],
"latitude": [40.41498005371624, 40.41498005371624],
"time": [1578653746335, 1580036142137],
"datetime": ["2020-01-10 10:55:46.335,", "2020-01-26 10:55:42.137"],
"B4": [7053, 6869],
"B3": [7177, 7069],
"B2": [7825, 7720],
}
)
get_google_streetview = GetGoogleStreetView(
size,
heading,
pitch,
key,
image_folder,
links_file,
metadata_file,
place,
meta_base,
)
assert (
get_google_streetview.generate_lat_lon_string(satellite_data_df)
== "40.41498005371624,-3.683317243711068"
)
lat_lon_str = get_google_streetview.generate_lat_lon_string(satellite_data_df)
params = get_google_streetview._generate_params(lat_lon_str)
satellite_data_df["lat_lon_str"] = get_google_streetview._join_lat_lon(satellite_data_df)
assert satellite_data_df["lat_lon_str"][0] == str(lat_lon_str)
assert (
get_google_streetview.add_metadata_to_satellite_df(satellite_data_df)["metadata"][0]
== "<Response [200]>"
)
params.pop("key")
assert params == {
"size": "600x300",
"location": "40.41498005371624,-3.683317243711068",
"pitch": "-0.76",
}
satellite_streetview_data_df = get_google_streetview.add_links_to_satellite_df(
satellite_data_df
)
assert satellite_streetview_data_df["latitude"][0] == 40.41498005371624
assert satellite_streetview_data_df["longitude"][0] == -3.683317243711068
assert len(satellite_streetview_data_df.columns) == 10
| 32.085714 | 93 | 0.661175 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 514 | 0.228851 |
93157bdf0f57eb1e0ef1f559e165114bea5a6259 | 6,186 | py | Python | src/cirrus/selfupdate.py | Maxsparrow/cirrus | ae9639daba4f2d8d9285e98d5b11a89eac573f96 | [
"Apache-2.0"
] | 12 | 2016-04-30T16:13:55.000Z | 2021-01-20T23:42:31.000Z | src/cirrus/selfupdate.py | Maxsparrow/cirrus | ae9639daba4f2d8d9285e98d5b11a89eac573f96 | [
"Apache-2.0"
] | 153 | 2015-02-12T15:25:42.000Z | 2020-03-09T07:16:15.000Z | src/cirrus/selfupdate.py | Maxsparrow/cirrus | ae9639daba4f2d8d9285e98d5b11a89eac573f96 | [
"Apache-2.0"
] | 7 | 2015-06-15T21:30:38.000Z | 2020-02-17T02:13:00.000Z | #!/usr/bin/env python
"""
_selfupdate_
Util command for updating the cirrus install itself
Supports getting a spefified branch or tag, or defaults to
looking up the latest release and using that instead.
"""
import sys
import argparse
import arrow
import os
import requests
import inspect
import contextlib
from cirrus.invoke_helpers import local
import cirrus
from cirrus.configuration import load_configuration
from cirrus.environment import cirrus_home, virtualenv_home, is_anaconda
from cirrus.github_tools import get_releases
from cirrus.git_tools import update_to_branch, update_to_tag
from cirrus.logger import get_logger
LOGGER = get_logger()
PYPI_JSON_URL = "https://pypi.python.org/pypi/cirrus-cli/json"
@contextlib.contextmanager
def chdir(dirname=None):
curdir = os.getcwd()
try:
if dirname is not None:
os.chdir(dirname)
yield
finally:
os.chdir(curdir)
def build_parser(argslist):
"""
_build_parser_
Set up command line parser for the selfupdate command
"""
parser = argparse.ArgumentParser(
description=(
'git cirrus selfupdate command, '
'used to update cirrus itself'
)
)
parser.add_argument('command', nargs='?')
parser.add_argument(
'--version',
help='specify a tag to install',
required=False,
default=None,
)
parser.add_argument(
'--upgrade-setuptools',
help="upgrade setuptools in cirrus installation (needed for some conda installations)",
default=False,
action='store_true'
)
parser.add_argument(
'--branch',
help='specify a branch to use',
required=False,
default=None,
)
parser.add_argument(
'--legacy-repo',
help='Use the old, non pip update process',
required=False,
dest='legacy_repo',
action='store_true',
default=False,
)
opts = parser.parse_args(argslist)
return opts
def sort_by_date(d1):
"""
cmp function to sort by datetime string
that is second element of tuples in list
"""
return arrow.get(d1[1])
def latest_release(config):
"""
_latest_release_
pull list of releases from GH repo, pick the newest by
publication date.
"""
releases = get_releases(config.organisation_name(), config.package_name())
tags = [(release['tag_name'], release['published_at']) for release in releases]
sorted(tags, key=sort_by_date)
most_recent_tag = tags[0][0]
return most_recent_tag
def latest_pypi_release():
"""grab latest release from pypi"""
resp = requests.get(PYPI_JSON_URL)
resp.raise_for_status()
content = resp.json()
latest = content['info']['version']
return latest
def find_cirrus_install():
"""
_find_cirrus_install_
Use inspect to find root path of install so we
can cd there and run the cirrus updates in the right location
The install process will check out the cirrus repo, the cirrus
module will be in src/cirrus of that dir
"""
cirrus_mod = os.path.dirname(inspect.getsourcefile(cirrus))
src_dir = os.path.dirname(cirrus_mod)
cirrus_dir = os.path.dirname(src_dir)
return cirrus_dir
def setup_develop(config):
"""
_setup_develop_
run local python setup.py develop via fab
"""
LOGGER.info("running setup.py develop...")
local(
'git cirrus build --upgrade'
)
local(
' . ./{0}/bin/activate && python setup.py develop'.format(
config.venv_name()
)
)
return
def pip_install(version, update_setuptools=False):
"""pip install the version of cirrus requested"""
pip_req = 'cirrus-cli=={0}'.format(version)
venv_path = virtualenv_home()
venv_name = os.path.basename(venv_path)
LOGGER.info("running pip upgrade...")
if is_anaconda():
if update_setuptools:
local(
'source {0}/bin/activate {1} && pip install --upgrade setuptools'.format(
venv_path, venv_path
)
)
local(
'source {0}/bin/activate {1} && pip install --upgrade {2}'.format(
venv_path, venv_path, pip_req
)
)
else:
if update_setuptools:
local(
' . ./{0}/bin/activate && pip install --upgrade setuptools'.format(
venv_name
)
)
local(
' . ./{0}/bin/activate && pip install --upgrade {1}'.format(
venv_name, pip_req
)
)
def legacy_update(opts):
"""update repo installed cirrus"""
install = find_cirrus_install()
with chdir(install):
config = load_configuration()
if opts.branch and opts.version:
msg = "Can specify branch -OR- version, not both"
raise RuntimeError(msg)
if opts.branch is not None:
update_to_branch(opts.branch, config)
setup_develop(config)
return
if opts.version is not None:
tag = opts.version
else:
tag = latest_release(config)
LOGGER.info("Retrieved latest tag: {0}".format(tag))
update_to_tag(tag, config)
setup_develop(config)
def pip_update(opts):
"""update pip installed cirrus"""
install = cirrus_home()
with chdir(install):
if opts.version is not None:
tag = opts.version
LOGGER.info("tag specified: {0}".format(tag))
else:
# should probably be a pip call now...
tag = latest_pypi_release()
LOGGER.info("Retrieved latest tag: {0}".format(tag))
pip_install(tag, opts.upgrade_setuptools)
def main():
"""
_main_
parse command line opts and deduce wether to check out
a branch or tag, default behaviour is to look up latest
release on github and install that
"""
opts = build_parser(sys.argv)
if opts.legacy_repo:
legacy_update(opts)
else:
pip_update(opts)
return
if __name__ == '__main__':
main()
| 24.943548 | 95 | 0.619787 | 0 | 0 | 172 | 0.027805 | 199 | 0.032169 | 0 | 0 | 2,165 | 0.349984 |
9315f46f0473c586d45b30c9a29e297a98f70f78 | 1,335 | py | Python | util.py | DrD1esel/GoWDiscordTeamBot | 9e61a386e2d341a42dd45b60eda7f57a95100283 | [
"BSD-3-Clause"
] | null | null | null | util.py | DrD1esel/GoWDiscordTeamBot | 9e61a386e2d341a42dd45b60eda7f57a95100283 | [
"BSD-3-Clause"
] | null | null | null | util.py | DrD1esel/GoWDiscordTeamBot | 9e61a386e2d341a42dd45b60eda7f57a95100283 | [
"BSD-3-Clause"
] | null | null | null | from base_bot import log
def atoi(text):
return int(text) if text.isdigit() else text
def bool_to_emoticon(value):
return value and "✅" or "❌"
# https://stackoverflow.com/questions/7204805/how-to-merge-dictionaries-of-dictionaries
# merges b into a
def merge(a, b, path=None):
if path is None: path = []
for key in b:
if key in a:
if isinstance(a[key], dict) and isinstance(b[key], dict):
merge(a[key], b[key], path + [str(key)])
else:
a[key] = b[key]
return a
def flatten(*args):
lst = []
for arg in args:
if type(arg) == str and arg != '':
lst.append(arg)
elif type(arg) == list:
lst.extend(arg)
return lst
async def pluralize_author(author):
if author[-1] == 's':
author += "'"
else:
author += "'s"
return author
def chunks(iterable, chunk_size):
for i in range(0, len(iterable), chunk_size):
yield iterable[i:i + chunk_size]
def debug(message):
guild = '-'
if message.guild:
guild = message.guild.name
log.debug(f'[{guild}][{message.channel}][{message.author.display_name}] {message.content}')
def convert_color_array(data_object):
return [c.replace('Color', '').lower() for c, v in data_object['ManaColors'].items() if v]
| 23.421053 | 95 | 0.591011 | 0 | 0 | 124 | 0.092606 | 0 | 0 | 134 | 0.100075 | 230 | 0.17177 |
93163adbfdd7c695f51e1b653629e2cdd3f9438d | 877 | py | Python | jsgf_tags.py | onchiptech/pyjsgf | f7ff26323e5e602ea10e7d302610c2fcb46234d6 | [
"MIT"
] | null | null | null | jsgf_tags.py | onchiptech/pyjsgf | f7ff26323e5e602ea10e7d302610c2fcb46234d6 | [
"MIT"
] | null | null | null | jsgf_tags.py | onchiptech/pyjsgf | f7ff26323e5e602ea10e7d302610c2fcb46234d6 | [
"MIT"
] | null | null | null | from jsgf import parse_grammar_string
def main(args):
# Parse input grammar file.
with open(args.input_file_path, "r") as fp:
text = fp.read()
print("\ninput grammar: ")
print(text)
grammar = parse_grammar_string(text)
# Print it.
print("\noutput grammar: ")
text = grammar.compile()
print(text)
with open(args.output_file_path, "w") as fp:
fp.write(text)
if __name__ == '__main__':
import argparse
"""Prepares arguments for the demo"""
# Usage: python jsgf_tags.py test.jsgf out.jsgf
parser = argparse.ArgumentParser(description='expand JSGF grammar tags.')
parser.add_argument('input_file_path', type=str, help='Input JSGF grammar file path.')
parser.add_argument('output_file_path', type=str, help='Output file path.')
args = parser.parse_args()
main(args)
| 28.290323 | 90 | 0.654504 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 290 | 0.330673 |
931a1eefc8b03639fa3b9346aaf0553aa1293c86 | 2,685 | py | Python | webspider/utils/log.py | chem2099/webspider | 5577878b69d85a4b75ece6701f6018767d0ee4bc | [
"MIT"
] | 256 | 2017-06-01T10:56:41.000Z | 2019-06-28T17:26:08.000Z | webspider/utils/log.py | chem2099/webspider | 5577878b69d85a4b75ece6701f6018767d0ee4bc | [
"MIT"
] | 14 | 2017-10-30T14:32:08.000Z | 2019-05-27T09:49:39.000Z | webspider/utils/log.py | chem2099/webspider | 5577878b69d85a4b75ece6701f6018767d0ee4bc | [
"MIT"
] | 88 | 2017-09-01T08:06:48.000Z | 2019-06-26T06:23:04.000Z | # coding: utf-8
import os
import logging.config
from webspider import setting
LOG_FILE_PATH = os.path.join(setting.BASE_DIR, 'log', 'spider_log.txt')
LOGGING_CONFIG = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'default': {
'format': '%(asctime)s- %(module)s:%(lineno)d [%(levelname)1.1s] %(name)s: %(message)s',
'datefmt': '%Y/%m/%d %H:%M:%S'
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'formatter': 'default',
'class': 'logging.StreamHandler'
},
'smtp': {
'level': 'ERROR',
'class': 'logging.handlers.SMTPHandler',
'formatter': 'default',
'mailhost': (setting.SMTP_CONF['host'], setting.SMTP_CONF['port']),
'fromaddr': setting.SMTP_CONF['from_email'],
'toaddrs': [setting.SMTP_CONF['to_email'], ],
'subject': '爬虫系统出现异常',
'credentials': (setting.MAIL_CONF['username'], setting.MAIL_CONF['password'])
},
'file': {
'level': 'ERROR',
'formatter': 'default',
'class': 'logging.handlers.RotatingFileHandler',
'filename': LOG_FILE_PATH,
'encoding': 'utf8'
},
},
'loggers': {
'': {
'handlers': ['console', 'file'],
'level': 'DEBUG',
'propagate': False,
},
'webspider': {
'handlers': ['console', 'file'],
'level': 'DEBUG',
'propagate': False,
},
'tornado': {
'handlers': ['console', 'file'],
'level': 'DEBUG',
'propagate': False,
},
'tornado.access': {
'handlers': ['console', 'file'],
'level': 'INFO',
'propagate': False,
},
'tornado.application': {
'handlers': ['console', 'file'],
'level': 'INFO',
'propagate': False,
},
'tornado.general': {
'handlers': ['console', 'file'],
'propagate': False,
'level': 'INFO',
},
'sqlalchemy.engine': {
'handlers': ['console', 'file'],
'level': 'INFO',
'propagate': False,
},
'gunicorn': {
'handlers': ['console', 'file'],
'level': 'INFO',
'propagate': False,
},
'celery': {
'handlers': ['console', 'file'],
'level': 'DEBUG',
'propagate': False,
},
},
}
def config_logging():
"""配置日志"""
logging.config.dictConfig(LOGGING_CONFIG)
| 27.397959 | 100 | 0.448417 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,192 | 0.440015 |
931abc49c69a201ff08a4c4d4963b916bcd4009f | 53,851 | py | Python | manila/tests/share/drivers/hitachi/hnas/test_driver.py | kpawar89/manila | d487c2db728cedf8357b9f4acbc0a45c21c3a83e | [
"Apache-2.0"
] | 1 | 2020-06-17T13:20:21.000Z | 2020-06-17T13:20:21.000Z | manila/tests/share/drivers/hitachi/hnas/test_driver.py | viroel/manila | fbcabd2c03985000bd9b4d4d9a4478bc0b784efa | [
"Apache-2.0"
] | null | null | null | manila/tests/share/drivers/hitachi/hnas/test_driver.py | viroel/manila | fbcabd2c03985000bd9b4d4d9a4478bc0b784efa | [
"Apache-2.0"
] | 1 | 2021-02-23T05:52:11.000Z | 2021-02-23T05:52:11.000Z | # Copyright (c) 2015 Hitachi Data Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
import ddt
from oslo_config import cfg
from manila import exception
import manila.share.configuration
import manila.share.driver
from manila.share.drivers.hitachi.hnas import driver
from manila.share.drivers.hitachi.hnas import ssh
from manila import test
CONF = cfg.CONF
# ---------------------------------------------------------------------------
# Share fixtures.  IDs double as the HNAS virtual-volume/export names, so the
# tests below assert against e.g. '/shares/<id>' paths built from them.
# ---------------------------------------------------------------------------

# Plain NFS share on the fake 'hnas' backend.
share_nfs = {
    'id': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a',
    'name': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a',
    'size': 50,
    'host': 'hnas',
    'share_proto': 'NFS',
    'share_type_id': 1,
    'share_network_id': 'bb329e24-3bdb-491d-acfd-dfe70c09b98d',
    'share_server_id': 'cc345a53-491d-acfd-3bdb-dfe70c09b98d',
    'export_locations': [{'path': '172.24.44.10:/shares/'
                                  'aa4a7710-f326-41fb-ad18-b4ad587fc87a'}],
}

# Plain CIFS share (UNC-style export location).
share_cifs = {
    'id': 'f5cadaf2-afbe-4cc4-9021-85491b6b76f7',
    'name': 'f5cadaf2-afbe-4cc4-9021-85491b6b76f7',
    'size': 50,
    'host': 'hnas',
    'share_proto': 'CIFS',
    'share_type_id': 1,
    'share_network_id': 'bb329e24-3bdb-491d-acfd-dfe70c09b98d',
    'share_server_id': 'cc345a53-491d-acfd-3bdb-dfe70c09b98d',
    'export_locations': [{'path': '\\\\172.24.44.10\\'
                                  'f5cadaf2-afbe-4cc4-9021-85491b6b76f7'}],
}

# Share whose 'host' does not match the configured backend.
share_invalid_host = {
    'id': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a',
    'name': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a',
    'size': 50,
    'host': 'invalid',
    'share_proto': 'NFS',
    'share_type_id': 1,
    'share_network_id': 'bb329e24-3bdb-491d-acfd-dfe70c09b98d',
    'share_server_id': 'cc345a53-491d-acfd-3bdb-dfe70c09b98d',
    'export_locations': [{'path': '172.24.44.10:/shares/'
                                  'aa4a7710-f326-41fb-ad18-b4ad587fc87a'}],
}

# NFS share with mount_snapshot_support, so snapshots get export locations.
share_mount_support_nfs = {
    'id': '62125744-fcdd-4f55-a8c1-d1498102f634',
    'name': '62125744-fcdd-4f55-a8c1-d1498102f634',
    'size': 50,
    'host': 'hnas',
    'share_proto': 'NFS',
    'share_type_id': 1,
    'share_network_id': 'bb329e24-3bdb-491d-acfd-dfe70c09b98d',
    'share_server_id': 'cc345a53-491d-acfd-3bdb-dfe70c09b98d',
    'export_locations': [{'path': '172.24.44.10:/shares/'
                                  '62125744-fcdd-4f55-a8c1-d1498102f634'}],
    'mount_snapshot_support': True,
}

# CIFS share with mount_snapshot_support.
share_mount_support_cifs = {
    'id': 'd6e7dc6b-f65f-49d9-968d-936f75474f29',
    'name': 'd6e7dc6b-f65f-49d9-968d-936f75474f29',
    'size': 50,
    'host': 'hnas',
    'share_proto': 'CIFS',
    'share_type_id': 1,
    'share_network_id': 'bb329e24-3bdb-491d-acfd-dfe70c09b98d',
    'share_server_id': 'cc345a53-491d-acfd-3bdb-dfe70c09b98d',
    'export_locations': [{'path': '172.24.44.10:/shares/'
                                  'd6e7dc6b-f65f-49d9-968d-936f75474f29'}],
    'mount_snapshot_support': True,
}

# ---------------------------------------------------------------------------
# Access-rule fixtures.
# ---------------------------------------------------------------------------

access_nfs_rw = {
    'id': 'acdc7172b-fe07-46c4-b78f-df3e0324ccd0',
    'access_type': 'ip',
    'access_to': '172.24.44.200',
    'access_level': 'rw',
    'state': 'active',
}

access_cifs_rw = {
    'id': '43167594-40e9-b899-1f4f-b9c2176b7564',
    'access_type': 'user',
    'access_to': 'fake_user',
    'access_level': 'rw',
    'state': 'active',
}

access_cifs_ro = {
    'id': '32407088-1f4f-40e9-b899-b9a4176b574d',
    'access_type': 'user',
    'access_to': 'fake_user',
    'access_level': 'ro',
    'state': 'active',
}

# ---------------------------------------------------------------------------
# Snapshot fixtures.  provider_location follows the driver's
# '/snapshots/<share_id>/<snapshot_id>' layout.
# ---------------------------------------------------------------------------

snapshot_nfs = {
    'id': 'abba6d9b-f29c-4bf7-aac1-618cda7aaf0f',
    'share_id': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a',
    'share': share_nfs,
    'provider_location': '/snapshots/aa4a7710-f326-41fb-ad18-b4ad587fc87a/'
                         'abba6d9b-f29c-4bf7-aac1-618cda7aaf0f',
    'size': 2,
}

snapshot_cifs = {
    'id': '91bc6e1b-1ba5-f29c-abc1-da7618cabf0a',
    'share_id': 'f5cadaf2-afbe-4cc4-9021-85491b6b76f7',
    'share': share_cifs,
    'provider_location': '/snapshots/f5cadaf2-afbe-4cc4-9021-85491b6b76f7/'
                         '91bc6e1b-1ba5-f29c-abc1-da7618cabf0a',
    'size': 2,
}

# Snapshot used in manage-snapshot tests (pre-existing on the backend).
manage_snapshot = {
    'id': 'bc168eb-fa71-beef-153a-3d451aa1351f',
    'share_id': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a',
    'share': share_nfs,
    'provider_location': '/snapshots/aa4a7710-f326-41fb-ad18-b4ad587fc87a'
                         '/snapshot18-05-2106',
}

snapshot_mount_support_nfs = {
    'id': '3377b015-a695-4a5a-8aa5-9b931b023380',
    'share_id': '62125744-fcdd-4f55-a8c1-d1498102f634',
    'share': share_mount_support_nfs,
    'provider_location': '/snapshots/62125744-fcdd-4f55-a8c1-d1498102f634'
                         '/3377b015-a695-4a5a-8aa5-9b931b023380',
}

snapshot_mount_support_cifs = {
    'id': 'f9916515-5cb8-4612-afa6-7f2baa74223a',
    'share_id': 'd6e7dc6b-f65f-49d9-968d-936f75474f29',
    'share': share_mount_support_cifs,
    'provider_location': '/snapshots/d6e7dc6b-f65f-49d9-968d-936f75474f29'
                         '/f9916515-5cb8-4612-afa6-7f2baa74223a',
}

# ---------------------------------------------------------------------------
# Negative fixtures: unsupported protocol and unsupported access rules.
# ---------------------------------------------------------------------------

invalid_share = {
    'id': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a',
    'name': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a',
    'size': 100,
    'host': 'hnas',
    'share_proto': 'HDFS',
}

invalid_snapshot = {
    'id': '24dcdcb5-a582-4bcc-b462-641da143afee',
    'share_id': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a',
    'share': invalid_share,
}

invalid_access_type = {
    'id': 'acdc7172b-fe07-46c4-b78f-df3e0324ccd0',
    'access_type': 'cert',
    'access_to': 'manila_user',
    'access_level': 'rw',
    'state': 'active',
}

invalid_access_level = {
    'id': 'acdc7172b-fe07-46c4-b78f-df3e0324ccd0',
    'access_type': 'ip',
    'access_to': 'manila_user',
    'access_level': '777',
    'state': 'active',
}

# Exact error message the driver raises for non NFS/CIFS protocols.
invalid_protocol_msg = ("Share backend error: Only NFS or CIFS protocol are "
                        "currently supported. Share provided %(id)s with "
                        "protocol %(proto)s." %
                        {'id': invalid_share['id'],
                         'proto': invalid_share['share_proto']})
@ddt.ddt
class HitachiHNASTestCase(test.TestCase):
    def setUp(self):
        """Build a HitachiHNASDriver wired to fake config and mocks.

        Sets every mandatory ``hitachi_hnas_*`` option, replaces the
        private storage with a mock, captures the driver's LOG object in
        ``self.mock_log`` and stubs the common HNAS SSH backend sanity
        checks so individual tests only mock the calls they assert on.
        """
        super(HitachiHNASTestCase, self).setUp()
        CONF.set_default('driver_handles_share_servers', False)
        # Mandatory backend options; init fails if any of these is unset.
        CONF.hitachi_hnas_evs_id = '2'
        CONF.hitachi_hnas_evs_ip = '172.24.44.10'
        CONF.hitachi_hnas_admin_network_ip = '10.20.30.40'
        CONF.hitachi_hnas_ip = '172.24.44.1'
        CONF.hitachi_hnas_ip_port = 'hitachi_hnas_ip_port'
        CONF.hitachi_hnas_user = 'hitachi_hnas_user'
        CONF.hitachi_hnas_password = 'hitachi_hnas_password'
        CONF.hitachi_hnas_file_system_name = 'file_system'
        CONF.hitachi_hnas_ssh_private_key = 'private_key'
        CONF.hitachi_hnas_cluster_admin_ip0 = None
        CONF.hitachi_hnas_stalled_job_timeout = 10
        CONF.hitachi_hnas_driver_helper = ('manila.share.drivers.hitachi.hnas.'
                                           'ssh.HNASSSHBackend')
        self.fake_conf = manila.share.configuration.Configuration(None)
        # Private storage returns nothing and silently accepts deletes.
        self.fake_private_storage = mock.Mock()
        self.mock_object(self.fake_private_storage, 'get',
                         mock.Mock(return_value=None))
        self.mock_object(self.fake_private_storage, 'delete',
                         mock.Mock(return_value=None))
        self._driver = driver.HitachiHNASDriver(
            private_storage=self.fake_private_storage,
            configuration=self.fake_conf)
        self._driver.backend_name = "hnas"
        # Tests assert on self.mock_log.debug/info/warning being called.
        self.mock_log = self.mock_object(driver, 'LOG')
        # mocking common backend calls
        self.mock_object(ssh.HNASSSHBackend, "check_fs_mounted", mock.Mock(
            return_value=True))
        self.mock_object(ssh.HNASSSHBackend, "check_vvol")
        self.mock_object(ssh.HNASSSHBackend, "check_quota")
        self.mock_object(ssh.HNASSSHBackend, "check_cifs")
        self.mock_object(ssh.HNASSSHBackend, "check_export")
        self.mock_object(ssh.HNASSSHBackend, 'check_directory')
@ddt.data('hitachi_hnas_driver_helper', 'hitachi_hnas_evs_id',
'hitachi_hnas_evs_ip', 'hitachi_hnas_ip', 'hitachi_hnas_user')
def test_init_invalid_conf_parameters(self, attr_name):
self.mock_object(manila.share.driver.ShareDriver, '__init__')
setattr(CONF, attr_name, None)
self.assertRaises(exception.InvalidParameterValue,
self._driver.__init__)
def test_init_invalid_credentials(self):
self.mock_object(manila.share.driver.ShareDriver,
'__init__')
CONF.hitachi_hnas_password = None
CONF.hitachi_hnas_ssh_private_key = None
self.assertRaises(exception.InvalidParameterValue,
self._driver.__init__)
@ddt.data(True, False)
def test_update_access_nfs(self, empty_rules):
if not empty_rules:
access1 = {
'access_type': 'ip',
'access_to': '172.24.10.10',
'access_level': 'rw'
}
access2 = {
'access_type': 'ip',
'access_to': '188.100.20.10',
'access_level': 'ro'
}
access_list = [access1, access2]
access_list_updated = (
[access1['access_to'] + '(' + access1['access_level'] +
',norootsquash)', access2['access_to'] + '(' +
access2['access_level'] + ')', ])
else:
access_list = []
access_list_updated = []
self.mock_object(ssh.HNASSSHBackend, "update_nfs_access_rule",
mock.Mock())
self._driver.update_access('context', share_nfs, access_list, [], [])
ssh.HNASSSHBackend.update_nfs_access_rule.assert_called_once_with(
access_list_updated, share_id=share_nfs['id'])
self.assertTrue(self.mock_log.debug.called)
def test_update_access_ip_exception(self):
access1 = {
'access_type': 'ip',
'access_to': '188.100.20.10',
'access_level': 'ro'
}
access2 = {
'access_type': 'something',
'access_to': '172.24.10.10',
'access_level': 'rw'
}
access_list = [access1, access2]
self.assertRaises(exception.InvalidShareAccess,
self._driver.update_access, 'context', share_nfs,
access_list, [], [])
def test_update_access_not_found_exception(self):
access1 = {
'access_type': 'ip',
'access_to': '188.100.20.10',
'access_level': 'ro'
}
access2 = {
'access_type': 'something',
'access_to': '172.24.10.10',
'access_level': 'rw'
}
access_list = [access1, access2]
self.mock_object(self._driver, '_ensure_share', mock.Mock(
side_effect=exception.HNASItemNotFoundException(msg='fake')))
self.assertRaises(exception.ShareResourceNotFound,
self._driver.update_access, 'context', share_nfs,
access_list, add_rules=[], delete_rules=[])
@ddt.data([access_cifs_rw, 'acr'], [access_cifs_ro, 'ar'])
@ddt.unpack
def test_allow_access_cifs(self, access_cifs, permission):
access_list_allow = [access_cifs]
self.mock_object(ssh.HNASSSHBackend, 'cifs_allow_access')
self._driver.update_access('context', share_cifs, [],
access_list_allow, [])
ssh.HNASSSHBackend.cifs_allow_access.assert_called_once_with(
share_cifs['id'], 'fake_user', permission, is_snapshot=False)
self.assertTrue(self.mock_log.debug.called)
def test_allow_access_cifs_invalid_type(self):
access_cifs_type_ip = {
'id': '43167594-40e9-b899-1f4f-b9c2176b7564',
'access_type': 'ip',
'access_to': 'fake_user',
'access_level': 'rw',
'state': 'active',
}
access_list_allow = [access_cifs_type_ip]
self.assertRaises(exception.InvalidShareAccess,
self._driver.update_access, 'context', share_cifs,
[], access_list_allow, [])
def test_deny_access_cifs(self):
access_list_deny = [access_cifs_rw]
self.mock_object(ssh.HNASSSHBackend, 'cifs_deny_access')
self._driver.update_access('context', share_cifs, [], [],
access_list_deny)
ssh.HNASSSHBackend.cifs_deny_access.assert_called_once_with(
share_cifs['id'], 'fake_user', is_snapshot=False)
self.assertTrue(self.mock_log.debug.called)
def test_deny_access_cifs_unsupported_type(self):
access_cifs_type_ip = {
'id': '43167594-40e9-b899-1f4f-b9c2176b7564',
'access_type': 'ip',
'access_to': 'fake_user',
'access_level': 'rw',
'state': 'active',
}
access_list_deny = [access_cifs_type_ip]
self.mock_object(ssh.HNASSSHBackend, 'cifs_deny_access')
self._driver.update_access('context', share_cifs, [], [],
access_list_deny)
self.assertTrue(self.mock_log.warning.called)
def test_update_access_invalid_share_protocol(self):
self.mock_object(self._driver, '_ensure_share')
ex = self.assertRaises(exception.ShareBackendException,
self._driver.update_access, 'context',
invalid_share, [], [], [])
self.assertEqual(invalid_protocol_msg, ex.msg)
def test_update_access_cifs_recovery_mode(self):
access_list = [access_cifs_rw, access_cifs_ro]
permission_list = [('fake_user1', 'acr'), ('fake_user2', 'ar')]
self.mock_object(ssh.HNASSSHBackend, 'list_cifs_permissions',
mock.Mock(return_value=permission_list))
self.mock_object(ssh.HNASSSHBackend, 'cifs_deny_access')
self.mock_object(ssh.HNASSSHBackend, 'cifs_allow_access')
self._driver.update_access('context', share_cifs, access_list, [], [])
ssh.HNASSSHBackend.list_cifs_permissions.assert_called_once_with(
share_cifs['id'])
self.assertTrue(self.mock_log.debug.called)
def _get_export(self, id, share_proto, ip, is_admin_only,
is_snapshot=False):
if share_proto.lower() == 'nfs':
if is_snapshot:
path = '/snapshots/' + id
else:
path = '/shares/' + id
export = ':'.join((ip, path))
else:
export = r'\\%s\%s' % (ip, id)
return {
"path": export,
"is_admin_only": is_admin_only,
"metadata": {},
}
    @ddt.data(share_nfs, share_cifs)
    def test_create_share(self, share):
        """create_share creates vvol+quota and the protocol-specific export.

        The returned export locations must include both the EVS data IP
        and the admin-only management IP.
        """
        self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted",
                         mock.Mock())
        self.mock_object(ssh.HNASSSHBackend, "vvol_create")
        self.mock_object(ssh.HNASSSHBackend, "quota_add")
        self.mock_object(ssh.HNASSSHBackend, "nfs_export_add", mock.Mock(
            return_value='/shares/' + share['id']))
        self.mock_object(ssh.HNASSSHBackend, "cifs_share_add")
        result = self._driver.create_share('context', share)
        self.assertTrue(self.mock_log.debug.called)
        ssh.HNASSSHBackend.vvol_create.assert_called_once_with(share['id'])
        ssh.HNASSSHBackend.quota_add.assert_called_once_with(share['id'],
                                                             share['size'])
        expected = [
            self._get_export(
                share['id'], share['share_proto'], self._driver.hnas_evs_ip,
                False),
            self._get_export(
                share['id'], share['share_proto'],
                self._driver.hnas_admin_network_ip, True)]
        # Only the export call matching the protocol may have been made.
        if share['share_proto'].lower() == 'nfs':
            ssh.HNASSSHBackend.nfs_export_add.assert_called_once_with(
                share_nfs['id'], snapshot_id=None)
            self.assertFalse(ssh.HNASSSHBackend.cifs_share_add.called)
        else:
            ssh.HNASSSHBackend.cifs_share_add.assert_called_once_with(
                share_cifs['id'], snapshot_id=None)
            self.assertFalse(ssh.HNASSSHBackend.nfs_export_add.called)
        self.assertEqual(expected, result)
def test_create_share_export_error(self):
self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted",
mock.Mock())
self.mock_object(ssh.HNASSSHBackend, "vvol_create")
self.mock_object(ssh.HNASSSHBackend, "quota_add")
self.mock_object(ssh.HNASSSHBackend, "nfs_export_add", mock.Mock(
side_effect=exception.HNASBackendException('msg')))
self.mock_object(ssh.HNASSSHBackend, "vvol_delete")
self.assertRaises(exception.HNASBackendException,
self._driver.create_share, 'context', share_nfs)
self.assertTrue(self.mock_log.debug.called)
ssh.HNASSSHBackend.vvol_create.assert_called_once_with(share_nfs['id'])
ssh.HNASSSHBackend.quota_add.assert_called_once_with(share_nfs['id'],
share_nfs['size'])
ssh.HNASSSHBackend.nfs_export_add.assert_called_once_with(
share_nfs['id'], snapshot_id=None)
ssh.HNASSSHBackend.vvol_delete.assert_called_once_with(share_nfs['id'])
def test_create_share_invalid_share_protocol(self):
self.mock_object(driver.HitachiHNASDriver, "_create_share",
mock.Mock(return_value="path"))
ex = self.assertRaises(exception.ShareBackendException,
self._driver.create_share, 'context',
invalid_share)
self.assertEqual(invalid_protocol_msg, ex.msg)
@ddt.data(share_nfs, share_cifs)
def test_delete_share(self, share):
self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted",
mock.Mock())
self.mock_object(ssh.HNASSSHBackend, "nfs_export_del")
self.mock_object(ssh.HNASSSHBackend, "cifs_share_del")
self.mock_object(ssh.HNASSSHBackend, "vvol_delete")
self._driver.delete_share('context', share)
self.assertTrue(self.mock_log.debug.called)
ssh.HNASSSHBackend.vvol_delete.assert_called_once_with(share['id'])
if share['share_proto'].lower() == 'nfs':
ssh.HNASSSHBackend.nfs_export_del.assert_called_once_with(
share['id'])
self.assertFalse(ssh.HNASSSHBackend.cifs_share_del.called)
else:
ssh.HNASSSHBackend.cifs_share_del.assert_called_once_with(
share['id'])
self.assertFalse(ssh.HNASSSHBackend.nfs_export_del.called)
    @ddt.data(snapshot_nfs, snapshot_cifs, snapshot_mount_support_nfs,
              snapshot_mount_support_cifs)
    def test_create_snapshot(self, snapshot):
        """Snapshots tree-clone the share; NFS exports go read-only meanwhile.

        For NFS the driver must downgrade every export rule to read-only
        before cloning and restore the original rules afterwards.  For CIFS
        it only checks the share is not in use.  Shares with
        mount_snapshot_support also get snapshot export locations.
        """
        hnas_id = snapshot['share_id']
        # Rules as returned by the backend, in several rw/ro spellings.
        access_list = ['172.24.44.200(rw,norootsquash)',
                       '172.24.49.180(all_squash,read_write,secure)',
                       '172.24.49.110(ro, secure)',
                       '172.24.49.112(secure,readwrite,norootsquash)',
                       '172.24.49.142(read_only, secure)',
                       '172.24.49.201(rw,read_write,readwrite)',
                       '172.24.49.218(rw)']
        # Same rules with every write permission downgraded to 'ro'.
        ro_list = ['172.24.44.200(ro,norootsquash)',
                   '172.24.49.180(all_squash,ro,secure)',
                   '172.24.49.110(ro, secure)',
                   '172.24.49.112(secure,ro,norootsquash)',
                   '172.24.49.142(read_only, secure)',
                   '172.24.49.201(ro,ro,ro)',
                   '172.24.49.218(ro)']
        export_locations = [
            self._get_export(
                snapshot['id'], snapshot['share']['share_proto'],
                self._driver.hnas_evs_ip, False, is_snapshot=True),
            self._get_export(
                snapshot['id'], snapshot['share']['share_proto'],
                self._driver.hnas_admin_network_ip, True, is_snapshot=True)]
        expected = {'provider_location': '/snapshots/' + hnas_id + '/' +
                                         snapshot['id']}
        if snapshot['share'].get('mount_snapshot_support'):
            expected['export_locations'] = export_locations
        self.mock_object(ssh.HNASSSHBackend, "get_nfs_host_list", mock.Mock(
            return_value=access_list))
        self.mock_object(ssh.HNASSSHBackend, "update_nfs_access_rule",
                         mock.Mock())
        self.mock_object(ssh.HNASSSHBackend, "is_cifs_in_use", mock.Mock(
            return_value=False))
        self.mock_object(ssh.HNASSSHBackend, "tree_clone")
        self.mock_object(ssh.HNASSSHBackend, "nfs_export_add")
        self.mock_object(ssh.HNASSSHBackend, "cifs_share_add")
        out = self._driver.create_snapshot('context', snapshot)
        ssh.HNASSSHBackend.tree_clone.assert_called_once_with(
            '/shares/' + hnas_id, '/snapshots/' + hnas_id + '/' +
            snapshot['id'])
        self.assertEqual(expected, out)
        if snapshot['share']['share_proto'].lower() == 'nfs':
            # Rules must be set to read-only and then restored.
            ssh.HNASSSHBackend.get_nfs_host_list.assert_called_once_with(
                hnas_id)
            ssh.HNASSSHBackend.update_nfs_access_rule.assert_any_call(
                ro_list, share_id=hnas_id)
            ssh.HNASSSHBackend.update_nfs_access_rule.assert_any_call(
                access_list, share_id=hnas_id)
        else:
            ssh.HNASSSHBackend.is_cifs_in_use.assert_called_once_with(
                hnas_id)
def test_create_snapshot_invalid_protocol(self):
self.mock_object(self._driver, '_ensure_share')
ex = self.assertRaises(exception.ShareBackendException,
self._driver.create_snapshot, 'context',
invalid_snapshot)
self.assertEqual(invalid_protocol_msg, ex.msg)
def test_create_snapshot_cifs_exception(self):
cifs_excep_msg = ("Share backend error: CIFS snapshot when share is "
"mounted is disabled. Set "
"hitachi_hnas_allow_cifs_snapshot_while_mounted to "
"True or unmount the share to take a snapshot.")
self.mock_object(ssh.HNASSSHBackend, "is_cifs_in_use", mock.Mock(
return_value=True))
ex = self.assertRaises(exception.ShareBackendException,
self._driver.create_snapshot, 'context',
snapshot_cifs)
self.assertEqual(cifs_excep_msg, ex.msg)
    # First snapshot of an empty share: tree_clone raises
    # HNASNothingToCloneException, so the driver logs a warning and creates
    # the snapshot directory instead. NFS access is flipped to read-only
    # (ro) for the clone window and then restored to the original (rw) list.
    def test_create_snapshot_first_snapshot(self):
        hnas_id = snapshot_nfs['share_id']
        self.mock_object(ssh.HNASSSHBackend, "get_nfs_host_list", mock.Mock(
            return_value=['172.24.44.200(rw)']))
        self.mock_object(ssh.HNASSSHBackend, "update_nfs_access_rule",
                         mock.Mock())
        self.mock_object(ssh.HNASSSHBackend, "tree_clone", mock.Mock(
            side_effect=exception.HNASNothingToCloneException('msg')))
        self.mock_object(ssh.HNASSSHBackend, "create_directory")
        self.mock_object(ssh.HNASSSHBackend, "nfs_export_add")
        self.mock_object(ssh.HNASSSHBackend, "cifs_share_add")
        self._driver.create_snapshot('context', snapshot_nfs)
        self.assertTrue(self.mock_log.warning.called)
        ssh.HNASSSHBackend.get_nfs_host_list.assert_called_once_with(
            hnas_id)
        # Access temporarily restricted to read-only during the clone...
        ssh.HNASSSHBackend.update_nfs_access_rule.assert_any_call(
            ['172.24.44.200(ro)'], share_id=hnas_id)
        # ...and restored to the original rules afterwards.
        ssh.HNASSSHBackend.update_nfs_access_rule.assert_any_call(
            ['172.24.44.200(rw)'], share_id=hnas_id)
        ssh.HNASSSHBackend.create_directory.assert_called_once_with(
            '/snapshots/' + hnas_id + '/' + snapshot_nfs['id'])
    # delete_snapshot removes the snapshot tree and its directory; the
    # NFS export / CIFS share is deleted only when the snapshot was
    # mountable (mount_snapshot_support on the parent share).
    @ddt.data(snapshot_nfs, snapshot_cifs,
              snapshot_mount_support_nfs, snapshot_mount_support_cifs)
    def test_delete_snapshot(self, snapshot):
        hnas_share_id = snapshot['share_id']
        hnas_snapshot_id = snapshot['id']
        self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted")
        self.mock_object(ssh.HNASSSHBackend, "tree_delete")
        self.mock_object(ssh.HNASSSHBackend, "delete_directory")
        self.mock_object(ssh.HNASSSHBackend, "nfs_export_del")
        self.mock_object(ssh.HNASSSHBackend, "cifs_share_del")
        self._driver.delete_snapshot('context', snapshot)
        self.assertTrue(self.mock_log.debug.called)
        self.assertTrue(self.mock_log.info.called)
        driver.HitachiHNASDriver._check_fs_mounted.assert_called_once_with()
        ssh.HNASSSHBackend.tree_delete.assert_called_once_with(
            '/snapshots/' + hnas_share_id + '/' + snapshot['id'])
        ssh.HNASSSHBackend.delete_directory.assert_called_once_with(
            '/snapshots/' + hnas_share_id)
        if snapshot['share']['share_proto'].lower() == 'nfs':
            if snapshot['share'].get('mount_snapshot_support'):
                ssh.HNASSSHBackend.nfs_export_del.assert_called_once_with(
                    snapshot_id=hnas_snapshot_id)
            else:
                ssh.HNASSSHBackend.nfs_export_del.assert_not_called()
        else:
            if snapshot['share'].get('mount_snapshot_support'):
                ssh.HNASSSHBackend.cifs_share_del.assert_called_once_with(
                    hnas_snapshot_id)
            else:
                ssh.HNASSSHBackend.cifs_share_del.assert_not_called()
    # A managed snapshot is deleted by its stored provider_location path
    # rather than by a path rebuilt from share/snapshot ids.
    def test_delete_managed_snapshot(self):
        hnas_id = manage_snapshot['share_id']
        self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted")
        self.mock_object(ssh.HNASSSHBackend, "tree_delete")
        self.mock_object(ssh.HNASSSHBackend, "delete_directory")
        self.mock_object(ssh.HNASSSHBackend, "nfs_export_del")
        self.mock_object(ssh.HNASSSHBackend, "cifs_share_del")
        self._driver.delete_snapshot('context', manage_snapshot)
        self.assertTrue(self.mock_log.debug.called)
        self.assertTrue(self.mock_log.info.called)
        driver.HitachiHNASDriver._check_fs_mounted.assert_called_once_with()
        ssh.HNASSSHBackend.tree_delete.assert_called_once_with(
            manage_snapshot['provider_location'])
        ssh.HNASSSHBackend.delete_directory.assert_called_once_with(
            '/snapshots/' + hnas_id)
    # ensure_share validates the vvol/quota and the protocol-specific
    # export, then returns both the EVS and admin-network export locations.
    @ddt.data(share_nfs, share_cifs)
    def test_ensure_share(self, share):
        result = self._driver.ensure_share('context', share)
        ssh.HNASSSHBackend.check_vvol.assert_called_once_with(share['id'])
        ssh.HNASSSHBackend.check_quota.assert_called_once_with(share['id'])
        expected = [
            self._get_export(
                share['id'], share['share_proto'], self._driver.hnas_evs_ip,
                False),
            self._get_export(
                share['id'], share['share_proto'],
                self._driver.hnas_admin_network_ip, True)]
        # Only the export check matching the share protocol may run.
        if share['share_proto'].lower() == 'nfs':
            ssh.HNASSSHBackend.check_export.assert_called_once_with(
                share['id'])
            self.assertFalse(ssh.HNASSSHBackend.check_cifs.called)
        else:
            ssh.HNASSSHBackend.check_cifs.assert_called_once_with(share['id'])
            self.assertFalse(ssh.HNASSSHBackend.check_export.called)
        self.assertEqual(expected, result)
    # ensure_share rejects shares with an unsupported protocol.
    def test_ensure_share_invalid_protocol(self):
        ex = self.assertRaises(exception.ShareBackendException,
                               self._driver.ensure_share, 'context',
                               invalid_share)
        self.assertEqual(invalid_protocol_msg, ex.msg)
    # Shrinking succeeds when the new size (11) still exceeds current
    # usage (10); the backend quota is updated to the new size.
    def test_shrink_share(self):
        self.mock_object(ssh.HNASSSHBackend, "get_share_usage", mock.Mock(
            return_value=10))
        self.mock_object(ssh.HNASSSHBackend, "modify_quota")
        self._driver.shrink_share(share_nfs, 11)
        ssh.HNASSSHBackend.get_share_usage.assert_called_once_with(
            share_nfs['id'])
        ssh.HNASSSHBackend.modify_quota.assert_called_once_with(
            share_nfs['id'], 11)
    # Shrinking below current usage would lose data and must be refused.
    def test_shrink_share_new_size_lower_than_usage(self):
        self.mock_object(ssh.HNASSSHBackend, "get_share_usage", mock.Mock(
            return_value=10))
        self.assertRaises(exception.ShareShrinkingPossibleDataLoss,
                          self._driver.shrink_share, share_nfs, 9)
        ssh.HNASSSHBackend.get_share_usage.assert_called_once_with(
            share_nfs['id'])
    # Extending succeeds when the filesystem reports enough free space
    # (get_stats -> total, free, dedupe-enabled flag).
    def test_extend_share(self):
        self.mock_object(ssh.HNASSSHBackend, "get_stats", mock.Mock(
            return_value=(500, 200, True)))
        self.mock_object(ssh.HNASSSHBackend, "modify_quota")
        self._driver.extend_share(share_nfs, 150)
        ssh.HNASSSHBackend.get_stats.assert_called_once_with()
        ssh.HNASSSHBackend.modify_quota.assert_called_once_with(
            share_nfs['id'], 150)
    # Extending beyond available filesystem capacity raises and the
    # quota is left untouched.
    def test_extend_share_with_no_available_space_in_fs(self):
        self.mock_object(ssh.HNASSSHBackend, "get_stats", mock.Mock(
            return_value=(500, 200, False)))
        self.mock_object(ssh.HNASSSHBackend, "modify_quota")
        self.assertRaises(exception.HNASBackendException,
                          self._driver.extend_share, share_nfs, 1000)
        ssh.HNASSSHBackend.get_stats.assert_called_once_with()
    # manage_existing adopts a pre-existing HNAS share: its quota becomes
    # the share size and both export locations are returned.
    @ddt.data(share_nfs, share_cifs)
    def test_manage_existing(self, share):
        expected_exports = [
            self._get_export(
                share['id'], share['share_proto'], self._driver.hnas_evs_ip,
                False),
            self._get_export(
                share['id'], share['share_proto'],
                self._driver.hnas_admin_network_ip, True)]
        expected_out = {'size': share['size'],
                        'export_locations': expected_exports}
        self.mock_object(ssh.HNASSSHBackend, "get_share_quota", mock.Mock(
            return_value=share['size']))
        out = self._driver.manage_existing(share, 'option')
        self.assertEqual(expected_out, out)
        ssh.HNASSSHBackend.get_share_quota.assert_called_once_with(
            share['id'])
    # A share without a configured quota cannot be managed.
    def test_manage_existing_no_quota(self):
        self.mock_object(ssh.HNASSSHBackend, "get_share_quota", mock.Mock(
            return_value=None))
        self.assertRaises(exception.ManageInvalidShare,
                          self._driver.manage_existing, share_nfs, 'option')
        ssh.HNASSSHBackend.get_share_quota.assert_called_once_with(
            share_nfs['id'])
    # private_storage returning a mismatched share id means the share is
    # already tracked under a different identity.
    def test_manage_existing_wrong_share_id(self):
        self.mock_object(self.fake_private_storage, 'get',
                         mock.Mock(return_value='Wrong_share_id'))
        self.assertRaises(exception.HNASBackendException,
                          self._driver.manage_existing, share_nfs, 'option')
    # Malformed NFS export paths (expected IP:/shares/share_id) are rejected.
    @ddt.data(':/', '1.1.1.1:/share_id', '1.1.1.1:/shares',
              '1.1.1.1:shares/share_id', ':/share_id')
    def test_manage_existing_wrong_path_format_nfs(self, wrong_location):
        expected_exception = ("Share backend error: Incorrect path. It "
                              "should have the following format: "
                              "IP:/shares/share_id.")
        self._test_manage_existing_wrong_path(
            share_nfs.copy(), expected_exception, wrong_location)
    # Malformed CIFS export paths (expected \\IP\share_id) are rejected.
    @ddt.data('\\\\1.1.1.1', '1.1.1.1\\share_id', '1.1.1.1\\shares\\share_id',
              '\\\\1.1.1.1\\shares\\share_id', '\\\\share_id')
    def test_manage_existing_wrong_path_format_cifs(self, wrong_location):
        expected_exception = ("Share backend error: Incorrect path. It should "
                              "have the following format: \\\\IP\\share_id.")
        self._test_manage_existing_wrong_path(
            share_cifs.copy(), expected_exception, wrong_location)
    # Shared helper: inject a bogus export path into a *copy* of the share
    # fixture and assert manage_existing raises with the expected message.
    def _test_manage_existing_wrong_path(
            self, share, expected_exception, wrong_location):
        share['export_locations'] = [{'path': wrong_location}]
        ex = self.assertRaises(exception.ShareBackendException,
                               self._driver.manage_existing, share, 'option')
        self.assertEqual(expected_exception, ex.msg)
def test_manage_existing_wrong_evs_ip(self):
share_nfs['export_locations'] = [{'path': '172.24.44.189:/shares/'
'aa4a7710-f326-41fb-ad18-'}]
self.assertRaises(exception.ShareBackendException,
self._driver.manage_existing, share_nfs,
'option')
    # A share whose host string cannot be parsed is rejected.
    def test_manage_existing_invalid_host(self):
        self.assertRaises(exception.ShareBackendException,
                          self._driver.manage_existing, share_invalid_host,
                          'option')
    # A share with an unsupported protocol is rejected.
    def test_manage_existing_invalid_protocol(self):
        self.assertRaises(exception.ShareBackendException,
                          self._driver.manage_existing, invalid_share,
                          'option')
    # unmanage drops the private-storage record and logs, with or without
    # export locations present on the share.
    @ddt.data(True, False)
    def test_unmanage(self, has_export_locations):
        share_copy = share_nfs.copy()
        if not has_export_locations:
            share_copy['export_locations'] = []
        self._driver.unmanage(share_copy)
        self.assertTrue(self.fake_private_storage.delete.called)
        self.assertTrue(self.mock_log.info.called)
    # The driver does not handle share servers, so it needs no allocations.
    def test_get_network_allocations_number(self):
        result = self._driver.get_network_allocations_number()
        self.assertEqual(0, result)
    # create_share_from_snapshot builds a new vvol + quota, clones the
    # snapshot tree into it, and exports via the protocol-matching call.
    @ddt.data([share_nfs, snapshot_nfs], [share_cifs, snapshot_cifs])
    @ddt.unpack
    def test_create_share_from_snapshot(self, share, snapshot):
        self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted",
                         mock.Mock())
        self.mock_object(ssh.HNASSSHBackend, "vvol_create")
        self.mock_object(ssh.HNASSSHBackend, "quota_add")
        self.mock_object(ssh.HNASSSHBackend, "tree_clone")
        self.mock_object(ssh.HNASSSHBackend, "cifs_share_add")
        self.mock_object(ssh.HNASSSHBackend, "nfs_export_add")
        result = self._driver.create_share_from_snapshot('context',
                                                         share,
                                                         snapshot)
        ssh.HNASSSHBackend.vvol_create.assert_called_once_with(share['id'])
        ssh.HNASSSHBackend.quota_add.assert_called_once_with(share['id'],
                                                             share['size'])
        ssh.HNASSSHBackend.tree_clone.assert_called_once_with(
            '/snapshots/' + share['id'] + '/' + snapshot['id'],
            '/shares/' + share['id'])
        expected = [
            self._get_export(
                share['id'], share['share_proto'], self._driver.hnas_evs_ip,
                False),
            self._get_export(
                share['id'], share['share_proto'],
                self._driver.hnas_admin_network_ip, True)]
        # Only the export call matching the share protocol may run.
        if share['share_proto'].lower() == 'nfs':
            ssh.HNASSSHBackend.nfs_export_add.assert_called_once_with(
                share['id'])
            self.assertFalse(ssh.HNASSSHBackend.cifs_share_add.called)
        else:
            ssh.HNASSSHBackend.cifs_share_add.assert_called_once_with(
                share['id'])
            self.assertFalse(ssh.HNASSSHBackend.nfs_export_add.called)
        self.assertEqual(expected, result)
    # Cloning an empty snapshot raises HNASNothingToCloneException; the
    # driver logs a warning and still creates/exports the new share.
    def test_create_share_from_snapshot_empty_snapshot(self):
        self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted",
                         mock.Mock())
        self.mock_object(ssh.HNASSSHBackend, "vvol_create")
        self.mock_object(ssh.HNASSSHBackend, "quota_add")
        self.mock_object(ssh.HNASSSHBackend, "tree_clone", mock.Mock(
            side_effect=exception.HNASNothingToCloneException('msg')))
        self.mock_object(ssh.HNASSSHBackend, "nfs_export_add")
        result = self._driver.create_share_from_snapshot('context', share_nfs,
                                                         snapshot_nfs)
        expected = [
            self._get_export(
                share_nfs['id'], share_nfs['share_proto'],
                self._driver.hnas_evs_ip, False),
            self._get_export(
                share_nfs['id'], share_nfs['share_proto'],
                self._driver.hnas_admin_network_ip, True)]
        self.assertEqual(expected, result)
        self.assertTrue(self.mock_log.warning.called)
        ssh.HNASSSHBackend.vvol_create.assert_called_once_with(share_nfs['id'])
        ssh.HNASSSHBackend.quota_add.assert_called_once_with(share_nfs['id'],
                                                             share_nfs['size'])
        ssh.HNASSSHBackend.tree_clone.assert_called_once_with(
            '/snapshots/' + share_nfs['id'] + '/' + snapshot_nfs['id'],
            '/shares/' + share_nfs['id'])
        ssh.HNASSSHBackend.nfs_export_add.assert_called_once_with(
            share_nfs['id'])
    # An unsupported protocol on the destination share aborts the clone.
    def test_create_share_from_snapshot_invalid_protocol(self):
        self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted",
                         mock.Mock())
        self.mock_object(ssh.HNASSSHBackend, "vvol_create")
        self.mock_object(ssh.HNASSSHBackend, "quota_add")
        self.mock_object(ssh.HNASSSHBackend, "tree_clone")
        ex = self.assertRaises(exception.ShareBackendException,
                               self._driver.create_share_from_snapshot,
                               'context', invalid_share, snapshot_nfs)
        self.assertEqual(invalid_protocol_msg, ex.msg)
    # If exporting the cloned share fails, the driver rolls back by
    # deleting the vvol it just created before re-raising.
    def test_create_share_from_snapshot_cleanup(self):
        dest_path = '/snapshots/' + share_nfs['id'] + '/' + snapshot_nfs['id']
        src_path = '/shares/' + share_nfs['id']
        self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted",
                         mock.Mock())
        self.mock_object(ssh.HNASSSHBackend, "vvol_create")
        self.mock_object(ssh.HNASSSHBackend, "quota_add")
        self.mock_object(ssh.HNASSSHBackend, "tree_clone")
        self.mock_object(ssh.HNASSSHBackend, "vvol_delete")
        self.mock_object(ssh.HNASSSHBackend, "nfs_export_add", mock.Mock(
            side_effect=exception.HNASBackendException(
                msg='Error adding nfs export.')))
        self.assertRaises(exception.HNASBackendException,
                          self._driver.create_share_from_snapshot,
                          'context', share_nfs, snapshot_nfs)
        ssh.HNASSSHBackend.vvol_create.assert_called_once_with(
            share_nfs['id'])
        ssh.HNASSSHBackend.quota_add.assert_called_once_with(
            share_nfs['id'], share_nfs['size'])
        ssh.HNASSSHBackend.tree_clone.assert_called_once_with(
            dest_path, src_path)
        ssh.HNASSSHBackend.nfs_export_add.assert_called_once_with(
            share_nfs['id'])
        # Rollback: the newly created vvol must be removed on failure.
        ssh.HNASSSHBackend.vvol_delete.assert_called_once_with(
            share_nfs['id'])
    # _check_fs_mounted is a thin pass-through to the backend check.
    def test__check_fs_mounted(self):
        self._driver._check_fs_mounted()
        ssh.HNASSSHBackend.check_fs_mounted.assert_called_once_with()
    # An unmounted filesystem makes the driver raise instead of proceeding.
    def test__check_fs_mounted_not_mounted(self):
        self.mock_object(ssh.HNASSSHBackend, 'check_fs_mounted', mock.Mock(
            return_value=False))
        self.assertRaises(exception.HNASBackendException,
                          self._driver._check_fs_mounted)
        ssh.HNASSSHBackend.check_fs_mounted.assert_called_once_with()
    # _update_share_stats forwards the backend capacity numbers
    # (total, free, dedupe flag from get_stats) plus static capability
    # flags to the base ShareDriver implementation.
    def test__update_share_stats(self):
        fake_data = {
            'share_backend_name': self._driver.backend_name,
            'driver_handles_share_servers':
                self._driver.driver_handles_share_servers,
            'vendor_name': 'Hitachi',
            'driver_version': '4.0.0',
            'storage_protocol': 'NFS_CIFS',
            'total_capacity_gb': 1000,
            'free_capacity_gb': 200,
            'reserved_percentage': driver.CONF.reserved_share_percentage,
            'qos': False,
            'thin_provisioning': True,
            'dedupe': True,
            'revert_to_snapshot_support': True,
            'mount_snapshot_support': True,
        }
        self.mock_object(ssh.HNASSSHBackend, 'get_stats', mock.Mock(
            return_value=(1000, 200, True)))
        self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted",
                         mock.Mock())
        self.mock_object(manila.share.driver.ShareDriver,
                         '_update_share_stats')
        self._driver._update_share_stats()
        self.assertTrue(self._driver.hnas.get_stats.called)
        (manila.share.driver.ShareDriver._update_share_stats.
         assert_called_once_with(fake_data))
        self.assertTrue(self.mock_log.info.called)
    # ensure_snapshot always validates the snapshot directory; export
    # locations are returned (and protocol exports checked) only when the
    # parent share has mount_snapshot_support.
    @ddt.data(snapshot_nfs, snapshot_cifs,
              snapshot_mount_support_nfs, snapshot_mount_support_cifs)
    def test_ensure_snapshot(self, snapshot):
        result = self._driver.ensure_snapshot('context', snapshot)
        if snapshot['share'].get('mount_snapshot_support'):
            expected = [
                self._get_export(
                    snapshot['id'], snapshot['share']['share_proto'],
                    self._driver.hnas_evs_ip, False, is_snapshot=True),
                self._get_export(
                    snapshot['id'], snapshot['share']['share_proto'],
                    self._driver.hnas_admin_network_ip, True,
                    is_snapshot=True)]
            if snapshot['share']['share_proto'].lower() == 'nfs':
                ssh.HNASSSHBackend.check_export.assert_called_once_with(
                    snapshot['id'], is_snapshot=True)
                self.assertFalse(ssh.HNASSSHBackend.check_cifs.called)
            else:
                ssh.HNASSSHBackend.check_cifs.assert_called_once_with(
                    snapshot['id'])
                self.assertFalse(ssh.HNASSSHBackend.check_export.called)
        else:
            # Non-mountable snapshots yield no export locations.
            expected = None
        ssh.HNASSSHBackend.check_directory.assert_called_once_with(
            snapshot['provider_location'])
        self.assertEqual(expected, result)
    # manage_existing_snapshot adopts a snapshot by its provider_location
    # path; the snapshot id is the 4th path segment
    # (/snapshots/<share_id>/<snapshot_id>).
    def test_manage_existing_snapshot(self):
        self.mock_object(ssh.HNASSSHBackend, 'check_directory',
                         mock.Mock(return_value=True))
        self.mock_object(self._driver, '_ensure_snapshot',
                         mock.Mock(return_value=[]))
        path_info = manage_snapshot['provider_location'].split('/')
        hnas_snapshot_id = path_info[3]
        out = self._driver.manage_existing_snapshot(manage_snapshot,
                                                    {'size': 20})
        ssh.HNASSSHBackend.check_directory.assert_called_with(
            '/snapshots/aa4a7710-f326-41fb-ad18-b4ad587fc87a'
            '/snapshot18-05-2106')
        self._driver._ensure_snapshot.assert_called_with(
            manage_snapshot,
            hnas_snapshot_id)
        self.assertEqual(20, out['size'])
        self.assertTrue(self.mock_log.debug.called)
        self.assertTrue(self.mock_log.info.called)
    # With mount_snapshot_support, managing a snapshot also resolves its
    # export locations; if _ensure_snapshot reports the export missing
    # (HNASItemNotFoundException), the driver creates the export first.
    @ddt.data(None, exception.HNASItemNotFoundException('Fake error.'))
    def test_manage_existing_snapshot_with_mount_support(self, exc):
        export_locations = [{
            'path': '172.24.44.10:/snapshots/'
                    '3377b015-a695-4a5a-8aa5-9b931b023380'}]
        self.mock_object(ssh.HNASSSHBackend, 'check_directory',
                         mock.Mock(return_value=True))
        self.mock_object(self._driver, '_ensure_snapshot',
                         mock.Mock(return_value=[], side_effect=exc))
        self.mock_object(self._driver, '_get_export_locations',
                         mock.Mock(return_value=export_locations))
        if exc:
            self.mock_object(self._driver, '_create_export')
        path_info = snapshot_mount_support_nfs['provider_location'].split('/')
        hnas_snapshot_id = path_info[3]
        out = self._driver.manage_existing_snapshot(
            snapshot_mount_support_nfs,
            {'size': 20, 'export_locations': export_locations})
        ssh.HNASSSHBackend.check_directory.assert_called_with(
            '/snapshots/62125744-fcdd-4f55-a8c1-d1498102f634'
            '/3377b015-a695-4a5a-8aa5-9b931b023380')
        self._driver._ensure_snapshot.assert_called_with(
            snapshot_mount_support_nfs,
            hnas_snapshot_id)
        self._driver._get_export_locations.assert_called_with(
            snapshot_mount_support_nfs['share']['share_proto'],
            hnas_snapshot_id,
            is_snapshot=True)
        if exc:
            # Missing export is recreated before returning.
            self._driver._create_export.assert_called_with(
                snapshot_mount_support_nfs['share_id'],
                snapshot_mount_support_nfs['share']['share_proto'],
                snapshot_id=hnas_snapshot_id)
        self.assertEqual(20, out['size'])
        self.assertEqual(export_locations, out['export_locations'])
        self.assertTrue(self.mock_log.debug.called)
        self.assertTrue(self.mock_log.info.called)
    # Non-integer size values in driver_options must be rejected.
    @ddt.data('fake_size', '128GB', '512 GB', {'size': 128})
    def test_manage_snapshot_invalid_size_exception(self, size):
        self.assertRaises(exception.ManageInvalidShareSnapshot,
                          self._driver.manage_existing_snapshot,
                          manage_snapshot, {'size': size})
    # A missing size key is equally invalid.
    def test_manage_snapshot_size_not_provided_exception(self):
        self.assertRaises(exception.ManageInvalidShareSnapshot,
                          self._driver.manage_existing_snapshot,
                          manage_snapshot, {})
    # provider_location must be an absolute /snapshots/<share>/<snap> path.
    @ddt.data('/root/snapshot_id', '/snapshots/share1/snapshot_id',
              '/directory1', 'snapshots/share1/snapshot_id')
    def test_manage_snapshot_invalid_path_exception(self, path):
        snap_copy = manage_snapshot.copy()
        snap_copy['provider_location'] = path
        self.assertRaises(exception.ManageInvalidShareSnapshot,
                          self._driver.manage_existing_snapshot,
                          snap_copy, {'size': 20})
        self.assertTrue(self.mock_log.debug.called)
    # The snapshot directory must actually exist on the backend.
    def test_manage_inexistent_snapshot_exception(self):
        self.mock_object(ssh.HNASSSHBackend, 'check_directory',
                         mock.Mock(return_value=False))
        self.assertRaises(exception.ManageInvalidShareSnapshot,
                          self._driver.manage_existing_snapshot,
                          manage_snapshot, {'size': 20})
        self.assertTrue(self.mock_log.debug.called)
    # unmanage_snapshot only logs; nothing is deleted on the backend.
    def test_unmanage_snapshot(self):
        self._driver.unmanage_snapshot(snapshot_nfs)
        self.assertTrue(self.mock_log.info.called)
    # revert_to_snapshot rebuilds the share (delete tree, recreate vvol +
    # quota) and clones the snapshot back. An empty snapshot makes
    # tree_clone raise HNASNothingToCloneException, which is only warned
    # about. NOTE: ddt.unpack passes the dict entries as keyword args, so
    # the (exc, snap) parameter order is independent of the dict layout.
    @ddt.data({'snap': snapshot_nfs, 'exc': None},
              {'snap': snapshot_cifs, 'exc': None},
              {'snap': snapshot_nfs,
               'exc': exception.HNASNothingToCloneException('fake')},
              {'snap': snapshot_cifs,
               'exc': exception.HNASNothingToCloneException('fake')})
    @ddt.unpack
    def test_revert_to_snapshot(self, exc, snap):
        self.mock_object(driver.HitachiHNASDriver, "_check_fs_mounted")
        self.mock_object(ssh.HNASSSHBackend, 'tree_delete')
        self.mock_object(ssh.HNASSSHBackend, 'vvol_create')
        self.mock_object(ssh.HNASSSHBackend, 'quota_add')
        self.mock_object(ssh.HNASSSHBackend, 'tree_clone',
                         mock.Mock(side_effect=exc))
        self._driver.revert_to_snapshot('context', snap, None, None)
        driver.HitachiHNASDriver._check_fs_mounted.assert_called_once_with()
        ssh.HNASSSHBackend.tree_delete.assert_called_once_with(
            '/'.join(('/shares', snap['share_id'])))
        ssh.HNASSSHBackend.vvol_create.assert_called_once_with(
            snap['share_id'])
        ssh.HNASSSHBackend.quota_add.assert_called_once_with(
            snap['share_id'], 2)
        ssh.HNASSSHBackend.tree_clone.assert_called_once_with(
            '/'.join(('/snapshots', snap['share_id'], snap['id'])),
            '/'.join(('/shares', snap['share_id'])))
        ssh.HNASSSHBackend.check_directory.assert_called_once_with(
            snap['provider_location'])
        if exc:
            self.assertTrue(self.mock_log.warning.called)
        self.assertTrue(self.mock_log.info.called)
    # Allowing NFS access to a snapshot writes the full rule list with
    # every host forced to read-only ("(ro)").
    def test_nfs_snapshot_update_access_allow(self):
        access1 = {
            'access_type': 'ip',
            'access_to': '172.24.10.10',
        }
        access2 = {
            'access_type': 'ip',
            'access_to': '172.31.20.20',
        }
        access_list = [access1, access2]
        self.mock_object(ssh.HNASSSHBackend, "update_nfs_access_rule")
        self._driver.snapshot_update_access('ctxt', snapshot_nfs, access_list,
                                            access_list, [])
        ssh.HNASSSHBackend.update_nfs_access_rule.assert_called_once_with(
            [access1['access_to'] + '(ro)', access2['access_to'] + '(ro)'],
            snapshot_id=snapshot_nfs['id'])
        ssh.HNASSSHBackend.check_directory.assert_called_once_with(
            snapshot_nfs['provider_location'])
        self.assertTrue(self.mock_log.debug.called)
    # Denying the last NFS rule leaves an empty rule list on the snapshot.
    def test_nfs_snapshot_update_access_deny(self):
        access1 = {
            'access_type': 'ip',
            'access_to': '172.24.10.10',
        }
        self.mock_object(ssh.HNASSSHBackend, "update_nfs_access_rule")
        self._driver.snapshot_update_access('ctxt', snapshot_nfs, [],
                                            [], [access1])
        ssh.HNASSSHBackend.update_nfs_access_rule.assert_called_once_with(
            [], snapshot_id=snapshot_nfs['id'])
        ssh.HNASSSHBackend.check_directory.assert_called_once_with(
            snapshot_nfs['provider_location'])
        self.assertTrue(self.mock_log.debug.called)
    # NFS snapshots only accept 'ip' access rules; 'user' is invalid.
    def test_nfs_snapshot_update_access_invalid_access_type(self):
        access1 = {
            'access_type': 'user',
            'access_to': 'user1',
        }
        self.assertRaises(exception.InvalidSnapshotAccess,
                          self._driver.snapshot_update_access, 'ctxt',
                          snapshot_nfs, [access1], [], [])
        ssh.HNASSSHBackend.check_directory.assert_called_once_with(
            snapshot_nfs['provider_location'])
    # Allowing CIFS access on a snapshot grants the user attribute-read
    # ('ar') permission through cifs_allow_access.
    def test_cifs_snapshot_update_access_allow(self):
        access1 = {
            'access_type': 'user',
            'access_to': 'fake_user1',
        }
        self.mock_object(ssh.HNASSSHBackend, 'cifs_allow_access')
        self._driver.snapshot_update_access('ctxt', snapshot_cifs, [access1],
                                            [access1], [])
        ssh.HNASSSHBackend.cifs_allow_access.assert_called_with(
            snapshot_cifs['id'], access1['access_to'], 'ar', is_snapshot=True)
        ssh.HNASSSHBackend.check_directory.assert_called_once_with(
            snapshot_cifs['provider_location'])
        self.assertTrue(self.mock_log.debug.called)
    # Denying CIFS access revokes the user via cifs_deny_access.
    def test_cifs_snapshot_update_access_deny(self):
        access1 = {
            'access_type': 'user',
            'access_to': 'fake_user1',
        }
        self.mock_object(ssh.HNASSSHBackend, 'cifs_deny_access')
        self._driver.snapshot_update_access('ctxt', snapshot_cifs, [], [],
                                            [access1])
        ssh.HNASSSHBackend.cifs_deny_access.assert_called_with(
            snapshot_cifs['id'], access1['access_to'], is_snapshot=True)
        ssh.HNASSSHBackend.check_directory.assert_called_once_with(
            snapshot_cifs['provider_location'])
        self.assertTrue(self.mock_log.debug.called)
    # Recovery mode (only access_rules provided): existing permissions are
    # listed, revoked (domain users quoted for the CLI), then re-granted
    # with backslashes escaped.
    def test_cifs_snapshot_update_access_recovery_mode(self):
        access1 = {
            'access_type': 'user',
            'access_to': 'fake_user1',
        }
        access2 = {
            'access_type': 'user',
            'access_to': 'HDS\\fake_user2',
        }
        access_list = [access1, access2]
        permission_list = [('fake_user1', 'ar'), ('HDS\\fake_user2', 'ar')]
        formatted_user = r'"\{1}{0}\{1}"'.format(access2['access_to'], '"')
        self.mock_object(ssh.HNASSSHBackend, 'list_cifs_permissions',
                         mock.Mock(return_value=permission_list))
        self.mock_object(ssh.HNASSSHBackend, 'cifs_deny_access')
        self.mock_object(ssh.HNASSSHBackend, 'cifs_allow_access')
        self._driver.snapshot_update_access('ctxt', snapshot_cifs, access_list,
                                            [], [])
        ssh.HNASSSHBackend.list_cifs_permissions.assert_called_once_with(
            snapshot_cifs['id'])
        ssh.HNASSSHBackend.cifs_deny_access.assert_called_with(
            snapshot_cifs['id'], formatted_user, is_snapshot=True)
        ssh.HNASSSHBackend.cifs_allow_access.assert_called_with(
            snapshot_cifs['id'], access2['access_to'].replace('\\', '\\\\'),
            'ar', is_snapshot=True)
        ssh.HNASSSHBackend.check_directory.assert_called_once_with(
            snapshot_cifs['provider_location'])
        self.assertTrue(self.mock_log.debug.called)
| 42.236078 | 79 | 0.624705 | 47,348 | 0.879241 | 0 | 0 | 47,357 | 0.879408 | 0 | 0 | 11,584 | 0.215112 |
931acb0aff55dcc9d21d89e7ab39172fc95002c7 | 2,978 | py | Python | sloth/test/dummy_data.py | maurov/xraysloth | 6f18ddcb02050431574693d46bcf4b89c719c40b | [
"BSD-3-Clause"
] | 4 | 2015-07-03T09:38:58.000Z | 2022-03-16T11:09:49.000Z | sloth/test/dummy_data.py | maurov/xraysloth | 6f18ddcb02050431574693d46bcf4b89c719c40b | [
"BSD-3-Clause"
] | null | null | null | sloth/test/dummy_data.py | maurov/xraysloth | 6f18ddcb02050431574693d46bcf4b89c719c40b | [
"BSD-3-Clause"
] | 2 | 2017-05-22T17:14:11.000Z | 2017-07-04T04:52:08.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Generate dummy data for tests/examples
"""
import numpy as np
def dummy_gauss_image(x=None, y=None,
                      xhalfrng=1.5, yhalfrng=None, xcen=0.5, ycen=0.9,
                      xnpts=1024, ynpts=None, xsigma=0.55, ysigma=0.25,
                      noise=0.3):
    """Build a dummy 2D Gaussian image with uniform random noise.

    Parameters
    ----------
    x, y : 1D arrays (optional)
        axes on which the image is evaluated [None -> generated from the
        center/range/npts parameters below]
    xhalfrng : float (optional)
        half range of the X axis [1.5]
    yhalfrng : float or None (optional)
        half range of the Y axis [None -> xhalfrng]
    xcen : float (optional)
        X center [0.5]
    ycen : float or None (optional)
        Y center [0.9]; None falls back to xcen
    xnpts : int (optional)
        number of points along X [1024]
    ynpts : int or None (optional)
        number of points along Y [None -> xnpts]
    xsigma : float (optional)
        Gaussian sigma along X [0.55]
    ysigma : float (optional)
        Gaussian sigma along Y [0.25]
    noise : float (optional)
        uniform random noise amplitude added to the signal [0.3]

    Returns
    -------
    x, y : 1D arrays
    signal : 2D array of shape (len(y), len(x))
    """
    # Any Y-axis parameter left unset inherits the X-axis value.
    yhalfrng = xhalfrng if yhalfrng is None else yhalfrng
    ycen = xcen if ycen is None else ycen
    ynpts = xnpts if ynpts is None else ynpts
    if x is None:
        x = np.linspace(xcen - xhalfrng, xcen + xhalfrng, xnpts)
    if y is None:
        y = np.linspace(ycen - yhalfrng, ycen + yhalfrng, ynpts)
    grid_x, grid_y = np.meshgrid(x, y)
    exponent = ((grid_x - xcen) ** 2 / (2 * xsigma ** 2)
                + (grid_y - ycen) ** 2 / (2 * ysigma ** 2))
    signal = np.exp(-exponent)
    # Uniform noise in [0, noise) on top of the clean Gaussian.
    signal += noise * np.random.random(size=signal.shape)
    return x, y, signal
def dummy_gauss_curve(xhalfrng=15, xcen=5, xnpts=512, xsigma=0.65, noise=0.3):
    """Create a dummy 1D Gaussian curve with noise

    Parameters
    ----------
    xhalfrng : float (optional)
        half range of the X axis [15]
    xcen : float (optional)
        X center [5]
    xnpts : int (optional)
        number of points X [512]
    xsigma : float (optional)
        sigma X [0.65]
    noise : float (optional)
        uniform random noise amplitude added to the signal [0.3]

    Returns
    -------
    x, signal : 1D arrays
    """
    x = np.linspace(xcen-xhalfrng, xcen+xhalfrng, xnpts)
    signal = np.exp(-((x-xcen)**2 / (2*xsigma**2)))
    # add noise
    signal += noise * np.random.random(size=signal.shape)
    return x, signal
def main():
    """Show two plot windows with dummy data.

    Opens a Plot1D with a noisy Gaussian curve and a Plot2D with a noisy
    Gaussian image, then blocks on input() so the Qt windows stay open.
    Requires silx and the sloth GUI plot widgets.
    """
    from silx import sx
    sx.enable_gui()
    from sloth.gui.plot.plot1D import Plot1D
    from sloth.gui.plot.plot2D import Plot2D
    p1 = Plot1D()
    p2 = Plot2D()
    x, y = dummy_gauss_curve()
    p1.addCurve(x, y, legend="test dummy Gaussian with noise")
    p1.show()
    x, y, signal = dummy_gauss_image()
    p2.addImage(signal, x=x, y=y, legend="test dummy image")
    p2.show()
    # Keep the process alive until the user dismisses the windows.
    input("Press enter to close windows")
| 28.09434 | 78 | 0.576897 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,530 | 0.513768 |
931b5bb2e3fc783e728514ebc6699f871cd2d505 | 159 | py | Python | approximate_equilibrium/optimize/__init__.py | NREL/EMISApproximateEquilibrium.jl | a94ec58e812496cec3cb40bdb84b14229c1b6440 | [
"BSD-3-Clause"
] | 1 | 2021-12-04T06:37:50.000Z | 2021-12-04T06:37:50.000Z | approximate_equilibrium/optimize/__init__.py | NREL/EMISApproximateEquilibrium.jl | a94ec58e812496cec3cb40bdb84b14229c1b6440 | [
"BSD-3-Clause"
] | null | null | null | approximate_equilibrium/optimize/__init__.py | NREL/EMISApproximateEquilibrium.jl | a94ec58e812496cec3cb40bdb84b14229c1b6440 | [
"BSD-3-Clause"
] | null | null | null | from approximate_equilibrium.optimize.optimization import de_optimizer, objective_function, brute_force_optimizer, objective_function_iccn, gradient_optimizer
| 79.5 | 158 | 0.91195 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
931b8e121065b6e7d0568d8a4e43d037da1b3b77 | 4,295 | py | Python | dynatrace/environment_v2/networkzones.py | hashmibilaldt/api-client-python | 910008a4c6800a547535acba79b7a49e540b5d16 | [
"Apache-2.0"
] | null | null | null | dynatrace/environment_v2/networkzones.py | hashmibilaldt/api-client-python | 910008a4c6800a547535acba79b7a49e540b5d16 | [
"Apache-2.0"
] | null | null | null | dynatrace/environment_v2/networkzones.py | hashmibilaldt/api-client-python | 910008a4c6800a547535acba79b7a49e540b5d16 | [
"Apache-2.0"
] | null | null | null | """
Copyright 2021 Dynatrace LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from datetime import datetime
from dynatrace.dynatrace_object import DynatraceObject
from typing import List, Optional, Union, Dict, Any
from dynatrace.environment_v2.schemas import EntityType, ManagementZone
from dynatrace.http_client import HttpClient
from dynatrace.pagination import PaginatedList
class NetworkZoneService:
    """Client for the Dynatrace Network Zones API (v2)."""

    ENDPOINT = "/api/v2/networkZones"
    ENDPOINT_GLOBALSETTINGS = "/api/v2/networkZoneSettings"
    def __init__(self, http_client: HttpClient):
        self.__http_client = http_client
    def list(self) -> PaginatedList["NetworkZone"]:
        """ Lists all network zones. No params
        :return: a list of Network Zones with details
        """
        return PaginatedList(NetworkZone, self.__http_client, target_url=self.ENDPOINT, list_item="networkZones")
    def get(self, networkzone_id: str):
        """ Gets parameters of specified network zone
        :param networkzone_id: the ID of the network zone
        :return: a Network Zone + details
        """
        response = self.__http_client.make_request(f"{self.ENDPOINT}/{networkzone_id}").json()
        return NetworkZone(raw_element=response)
    def update(self, networkzone_id: str, alternate_zones: Optional[List[str]] = None, description: Optional[str] = None):
        """ Updates an existing network zone or creates a new one (PUT upsert)
        :param networkzone_id: the ID of the network zone, if none exists, will create
        :param alternate_zones: optional list of text body of alternative network zones
        :param description: optional text body for short description of network zone
        :return: HTTP response
        """
        params = {"alternativeZones": alternate_zones, "description": description}
        return self.__http_client.make_request(path=f"{self.ENDPOINT}/{networkzone_id}", params=params, method="PUT")
    def delete(self, networkzone_id: str):
        """ Deletes the specified network zone
        :param networkzone_id: the ID of the network zone
        :return: HTTP response
        """
        return self.__http_client.make_request(path=f"{self.ENDPOINT}/{networkzone_id}", method="DELETE")
    # NOTE(review): camelCase method names kept for backward compatibility
    # with existing callers, although PEP 8 would prefer snake_case.
    def getGlobalConfig(self):
        """ Gets the global configuration of network zones. No params
        :return: Network Zone Global Settings object
        """
        response = self.__http_client.make_request(path=self.ENDPOINT_GLOBALSETTINGS).json()
        return NetworkZoneSettings(raw_element=response)
    def updateGlobalConfig(self, configuration: bool):
        """ Updates the global configuration of network zones.
        :param configuration: boolean setting to enable/disable NZs
        :return: HTTP response
        """
        params = {"networkZonesEnabled": configuration}
        return self.__http_client.make_request(path=self.ENDPOINT_GLOBALSETTINGS, method="PUT", params=params)
class NetworkZone(DynatraceObject):
    """A single network zone as returned by the Network Zones API."""

    def _create_from_raw_data(self, raw_element: Dict[str, Any]):
        # Map the raw API payload onto typed attributes; missing keys
        # become None via dict.get.
        self.id: str = raw_element.get("id")
        self.description: str = raw_element.get("description")
        self.alternative_zones: List[str] = raw_element.get("alternativeZones")
        self.num_oneagents_using: int = raw_element.get("numOfOneAgentsUsing")
        self.num_oneagents_configured: int = raw_element.get("numOfConfiguredOneAgents")
        self.num_oneagents_from_other_zones: int = raw_element.get("numOfOneAgentsFromOtherZones")
        self.num_configured_activegates: int = raw_element.get("numOfConfiguredActiveGates")
class NetworkZoneSettings(DynatraceObject):
    """Global network-zone configuration (enabled/disabled flag)."""

    def _create_from_raw_data(self, raw_element: Dict[str, bool]):
        self.network_zones_enabled: bool = raw_element.get("networkZonesEnabled")
931c579da4bce33b29c5c9ae99f9be7566e00afb | 2,984 | py | Python | tools/scruffy/checkers/orgs.py | paultag/pupa | 137293925503496e15137540e049bf544e129971 | [
"BSD-3-Clause"
] | null | null | null | tools/scruffy/checkers/orgs.py | paultag/pupa | 137293925503496e15137540e049bf544e129971 | [
"BSD-3-Clause"
] | null | null | null | tools/scruffy/checkers/orgs.py | paultag/pupa | 137293925503496e15137540e049bf544e129971 | [
"BSD-3-Clause"
] | null | null | null | from .. import Check
from .common import common_checks
def check(db):
    """Yield data-quality Check records for organization documents in *db*.

    Scans 'legislature' organizations for jurisdiction-id problems, then
    flags 'party' organizations that wrongly carry a jurisdiction_id.
    """
    for org in db.organizations.find({"classification": "legislature"}):
        # Generic per-document checks shared across collections.
        # NOTE(review): the loop variable shadows this function's own name.
        for check in common_checks(org, 'organization', 'organizations'):
            yield check
        jid = org.get('jurisdiction_id')
        if jid is None:
            # A legislature without a jurisdiction cannot be validated further.
            yield Check(collection='organizations',
                        id=org['_id'],
                        tagname='org-has-no-jurisdiction',
                        severity='critical')
            continue
        # All legislatures claiming the same jurisdiction; multiple entries are
        # only acceptable when each one sits in a distinct chamber.
        jorgs = list(db.organizations.find({
            "classification": "legislature",
            "jurisdiction_id": org['jurisdiction_id']
        }))
        if len(jorgs) != 1 and len(set([x['chamber']
                                        for x in jorgs])) != len(jorgs):
            yield Check(collection='organizations',
                        id=org['_id'],
                        tagname='jurisdiction_id-has-duped-orgs-by-chamber',
                        severity='critical',
                        data=[[x['chamber'], x['_id']] for x in jorgs
                              if x['chamber'] == org['chamber']])
        if org.get('parent_id'):
            # Legislatures are expected to be top-level organizations.
            yield Check(collection='organizations',
                        id=org['_id'],
                        tagname='jurisdiction-has-a-parent',
                        severity='important')
        if "/" not in jid:
            yield Check(collection='organizations',
                        id=org['_id'],
                        tagname='jurisdiction-has-no-slashes',
                        severity='grave')
        else:
            # Expected shape: 'ocd-jurisdiction/<key:value>/.../<what>'.
            prefix, uid = jid.split("/", 1)
            uid, what = uid.rsplit("/", 1)
            if prefix != 'ocd-jurisdiction':
                yield Check(collection='organizations',
                            id=org['_id'],
                            tagname='org-has-bad-jurisdiction-id-prefix',
                            severity='critical')
            if ":" in what:
                # The final path segment must not be a key:value pair.
                yield Check(collection='organizations',
                            id=org['_id'],
                            tagname='org-has-malformed-jurisdiction-id-ender',
                            severity='critical')
            # Every interior path segment must split into exactly key:value.
            kvp = [f.split(":") for f in uid.split("/")]
            if any((len(x) != 2) for x in kvp):
                yield Check(collection='organizations',
                            id=org['_id'],
                            tagname='org-has-malformed-jurisdiction-id-path',
                            severity='critical')
    for org in db.organizations.find({"classification": "party"}):
        # Parties are jurisdiction-agnostic; carrying one is an error.
        if 'jurisdiction_id' in org and org['jurisdiction_id']:
            yield Check(collection='organizations',
                        id=org['_id'],
                        tagname='party-has-jurisdiction-id',
                        severity='critical',
                        data={
                            "jurisdiction_id": org['jurisdiction_id']
                        })
| 40.324324 | 76 | 0.464142 | 0 | 0 | 2,926 | 0.980563 | 0 | 0 | 0 | 0 | 823 | 0.275804 |
931d21150c4fef29838bf9c36548a5b31d001040 | 5,165 | py | Python | ip_allow_lists/ip_compare.py | PaloAltoNetworks/pcs-migration-management | 766c8c861befa92e593b23ad6d248e33f62054bb | [
"ISC"
] | 1 | 2022-03-17T12:51:45.000Z | 2022-03-17T12:51:45.000Z | ip_allow_lists/ip_compare.py | PaloAltoNetworks/pcs-migration-management | 766c8c861befa92e593b23ad6d248e33f62054bb | [
"ISC"
] | 2 | 2021-11-03T15:34:40.000Z | 2021-12-14T19:50:20.000Z | ip_allow_lists/ip_compare.py | PaloAltoNetworks/pcs-migration-management | 766c8c861befa92e593b23ad6d248e33f62054bb | [
"ISC"
] | 4 | 2021-11-09T17:57:01.000Z | 2022-01-24T17:41:21.000Z | from sdk.color_print import c_print
from tqdm import tqdm
#Migrate
def compare_trusted_networks(source_networks, clone_networks):
    """Return the source trusted networks whose name is absent from the clone tenant."""
    clone_names = {network.get('name') for network in clone_networks}
    return [network for network in source_networks
            if network.get('name') not in clone_names]
#Sync
def compare_each_network_cidr_and_add(session, source_networks, clone_networks, logger):
    """Add to each clone network any CIDR blocks present on the same-named source network.

    :param session: API session exposing request(method, path, json=...)
    :param source_networks: network dicts from the source tenant
    :param clone_networks: network dicts from the clone tenant
    :param logger: logger used for API call tracing
    :return: number of CIDR blocks added
    """
    added = 0
    for src_network in source_networks:
        # Find the clone network with the same name. Using next() with a default
        # fixes the original `[...][0]` indexing, which raised IndexError when no
        # match existed and made the `if not new_network` guard unreachable.
        new_network = next(
            (network for network in clone_networks
             if network.get('name') == src_network.get('name')),
            None,
        )
        if not new_network:
            # Network would have just been added, can't update it here
            # (preserves the original early exit).
            break
        # Collect the source CIDR blocks missing from the clone network.
        existing_cidrs = {n_cidr.get('cidr') for n_cidr in new_network.get('cidrs')}
        cidr_to_add = [cidr for cidr in src_network.get('cidrs')
                       if cidr.get('cidr') not in existing_cidrs]
        net_name = src_network.get('name')
        for cidr in tqdm(cidr_to_add, desc='Adding CIDRs', leave=False):
            networkUuid = new_network.get('uuid')
            logger.debug(f'API - Adding CIDRs to network {net_name}')
            session.request('POST', f'/allow_list/network/{networkUuid}/cidr', json=cidr)
            added += 1
    return added
#Sync
def compare_each_network_cidr_and_update(session, source_networks, clone_networks, logger):
    """Copy changed CIDR descriptions from source networks onto clone networks.

    Mutates the matching clone CIDR dicts in place, pushes each change via a
    PUT request, and returns the number of CIDR entries updated.

    NOTE(review): every source network is compared against every clone network,
    not only the one with the matching name (unlike the add/delete variants) —
    confirm whether pairing by network name was intended here.
    """
    updated = 0
    for src_network in source_networks:
        for cln_network in clone_networks:
            #Check if all cidr blocks are present
            cidrs_to_update = []
            for s_cidr in src_network.get('cidrs'):
                for c_cidr in cln_network.get('cidrs'):
                    # Same CIDR block but a differing description: sync it in place.
                    if s_cidr.get('cidr') == c_cidr.get('cidr') and s_cidr.get('description', '') != c_cidr.get('description', ''):
                        c_cidr.update(description=s_cidr['description'])
                        cidrs_to_update.append(c_cidr)
            for cidr in tqdm(cidrs_to_update, desc='Updating CIDRs', leave=False):
                networkUuid = cln_network.get('uuid')
                name = cln_network.get('name')
                c_id = cidr.get('uuid')
                logger.debug(f'API - Updating CIDR on network {name}')
                session.request('PUT', f'/allow_list/network/{networkUuid}/cidr/{c_id}', json=cidr)
                updated += 1
    return updated
#Sync
def compare_each_network_cidr_and_delete(session, source_networks, clone_networks, logger):
    """Delete from each clone network the CIDR blocks no longer present on the
    same-named source network.

    :param session: API session exposing request(method, path)
    :param source_networks: network dicts from the source tenant
    :param clone_networks: network dicts from the clone tenant
    :param logger: logger used for API call tracing
    :return: number of CIDR blocks deleted

    Fixes: the original incremented an uninitialized `deleted` counter
    (UnboundLocalError on first call), kept an unused `networks_delta` list,
    and never returned the count.
    """
    deleted = 0
    for src_network in source_networks:
        for cln_network in clone_networks:
            if src_network.get('name') == cln_network.get('name'):
                name = src_network.get('name')
                networkUuid = cln_network.get('uuid')
                # CIDR blocks on the clone that the source no longer has.
                source_cidrs = {ci.get('cidr') for ci in src_network.get('cidrs')}
                cidrs_to_delete = [c_cidr for c_cidr in cln_network.get('cidrs')
                                   if c_cidr.get('cidr') not in source_cidrs]
                # Delete the cidrs from the destination tenant
                for cidr in tqdm(cidrs_to_delete, desc='Deleting CIDRs', leave=False):
                    cidrUuid = cidr.get('uuid')
                    logger.debug(f'API - Deleting CIDR from network: \'{name}\'')
                    session.request('DELETE', f'/allow_list/network/{networkUuid}/cidr/{cidrUuid}')
                    deleted += 1
    return deleted
#Sync
# def compare_cidr_lists(src_cidrs: list, cln_cidrs: list):
# cidrs_to_add = []
# for cidr in src_cidrs['cidr']:
# if cidr not in cln_cidrs['cidr']:
# cidrs_to_add.append(cidr)
# return cidrs_to_add
def compare_login_ips(src_logins, cln_logins):
    """Return source trusted-login entries whose name does not appear in the clone tenant."""
    known_names = {login.get('name') for login in cln_logins}
    return [login for login in src_logins if login.get('name') not in known_names]
#Sync
def compare_each_login_ip(src_login_ips, cln_login_ips):
    """Return source login-IP entries that differ (by cidr or description) from clone entries.

    Note: a source entry is appended once per non-matching clone entry, mirroring
    the pairwise (cross-product) comparison of the original implementation.
    """
    mismatched = []
    for candidate in src_login_ips:
        for existing in cln_login_ips:
            differs = (candidate.get('cidr') != existing.get('cidr')
                       or candidate.get('description') != existing.get('description'))
            if differs:
                mismatched.append(candidate)
    return mismatched
#Sync
def compare_login_ip_to_delete(src_login_ips, cln_login_ips):
    """Return clone login-IP entries whose name is absent from the source list."""
    source_names = {entry.get('name') for entry in src_login_ips}
    return [entry for entry in cln_login_ips
            if entry.get('name') not in source_names]
931dd502335fb2a0b22db32c479212aa84cc2649 | 4,004 | py | Python | lib/surface/source/captures/upload.py | bopopescu/SDK | e6d9aaee2456f706d1d86e8ec2a41d146e33550d | [
"Apache-2.0"
] | null | null | null | lib/surface/source/captures/upload.py | bopopescu/SDK | e6d9aaee2456f706d1d86e8ec2a41d146e33550d | [
"Apache-2.0"
] | null | null | null | lib/surface/source/captures/upload.py | bopopescu/SDK | e6d9aaee2456f706d1d86e8ec2a41d146e33550d | [
"Apache-2.0"
] | 2 | 2020-11-04T03:08:21.000Z | 2020-11-05T08:14:41.000Z | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Capture a project repository.
TODO(user) make capture a group with "create", "list", etc.
"""
import json
import os
from googlecloudsdk.api_lib.source import capture
from googlecloudsdk.calliope import base
from googlecloudsdk.core import log
class Upload(base.Command):
  """Upload a source capture from given input files."""

  detailed_help = {
      'DESCRIPTION': """\
          This command uploads a capture of the specified source directory to
          a Google-hosted Git repository accessible with the current project's
          credentials. If the name of an existing capture is provided, the
          existing capture will be modified to include the new files.
          Otherwise a new capture will be created to hold the files.
          When creating a capture, this command can also produce a source
          context json file describing the capture.
          See https://cloud.google.com/tools/cloud-debugger/ for details on
          where to deploy the source context json file in order to enable
          Cloud Diagnostic tools to display the captured sources.
      """
  }

  @staticmethod
  def Args(parser):
    # Registers the command's positional PATH argument and its option flags.
    parser.add_argument(
        'source_location', metavar='PATH',
        help="""\
            The directory or archive containing the sources to capture. Files
            and subdirectories contained in that directory or archive will be
            added to the capture. If PATH refers to a file, the file may be
            a Java source jar or a zip archive.
        """)
    parser.add_argument(
        '--capture-id', metavar='ID',
        completion_resource='source.captures',
        help="""\
            The ID of the capture to create or modify.
        """)
    parser.add_argument(
        '--target-path', metavar='PATH', default='',
        help="""\
            The directory tree under source-location will be uploaded under
            target-path in the capture's directory tree.
        """)
    parser.add_argument(
        '--context-file', metavar='json-file-name',
        help="""\
            The name of the source context json file to produce. Defaults to
            source-contexts.json in the current directory. If context-file names
            a directory, the output file will be source-contexts.json in that
            directory.
        """)

  def Run(self, args):
    """Run the capture upload command."""
    mgr = capture.CaptureManager()
    result = mgr.UploadCapture(args.capture_id, args.source_location,
                               args.target_path)
    # Resolve where the source-context JSON should be written: either the
    # user-specified file (or directory), or source-contexts.json in CWD.
    if args.context_file:
      if os.path.isdir(args.context_file):
        json_filename = os.path.join(args.context_file, 'source-contexts.json')
      else:
        json_filename = args.context_file
    else:
      json_filename = 'source-contexts.json'
    with open(json_filename, 'w') as source_context_file:
      json.dump(result['source_contexts'], source_context_file)
    log.Print('Created context file {0}\n'.format(json_filename))
    return result

  def Display(self, args, result):
    """This method is called to print the result of the Run() method.
    Args:
      args: The arguments that command was run with.
      result: The value returned from the Run() method.
    """
    # result is expected to carry 'capture', 'files_written' and
    # 'size_written' keys, as consumed by the format() call below.
    log.Print(
        ('Created source capture {capture.id}.\n'
         'Wrote {files_written} files, {size_written} bytes.\n').
        format(**result))
| 37.074074 | 80 | 0.665085 | 3,157 | 0.788462 | 0 | 0 | 1,217 | 0.303946 | 0 | 0 | 2,795 | 0.698052 |
931f2ac2304379a993c5a0b48864b82b64c72225 | 7,047 | py | Python | pyk/src/pyk/tests/test_kcfg.py | runtimeverification/k | 044c49e535516c1d142206795bf1d8b2c0fcbf0d | [
"BSD-3-Clause"
] | 23 | 2017-02-26T10:49:29.000Z | 2022-03-22T20:32:10.000Z | pyk/src/pyk/tests/test_kcfg.py | runtimeverification/k | 044c49e535516c1d142206795bf1d8b2c0fcbf0d | [
"BSD-3-Clause"
] | 112 | 2016-01-20T16:23:47.000Z | 2022-03-31T21:54:59.000Z | pyk/src/pyk/tests/test_kcfg.py | runtimeverification/k | 044c49e535516c1d142206795bf1d8b2c0fcbf0d | [
"BSD-3-Clause"
] | 6 | 2016-02-23T19:12:26.000Z | 2022-03-31T01:53:24.000Z | from typing import Any, Dict, List, Tuple
from unittest import TestCase
from ..cterm import CTerm
from ..kast import TRUE, KApply, KInner, KVariable
from ..kcfg import KCFG
from ..prelude import token
def nid(i: int) -> str:
    """Return the id of the canonical test node for index *i*."""
    return node(i).id
# over 10 is variables
def term(i: int) -> CTerm:
    """Build the canonical `<top>` configuration for index *i*.

    Indices greater than 10 become variables (V11, V12, ...); smaller
    indices become integer tokens.
    """
    body: KInner = KVariable('V' + str(i)) if i > 10 else token(i)
    return CTerm(KApply('<top>', [body]))
def node(i: int) -> KCFG.Node:
    """Build the canonical KCFG node for index *i* (see term())."""
    return KCFG.Node(term(i))
def edge(i: int, j: int) -> KCFG.Edge:
    """Build a trivial edge (condition TRUE, depth 1) from node i to node j."""
    return KCFG.Edge(node(i), node(j), TRUE, 1)
def node_dicts(n: int) -> List[Dict[str, Any]]:
    """Serialize the first *n* canonical nodes to their dict representation."""
    return [node(i).to_dict() for i in range(n)]
def edge_dicts(*edges: Tuple[int, int]) -> List[Dict[str, Any]]:
    """Serialize (source, target) index pairs as edge dicts (condition TRUE, depth 1)."""
    return [
        {'source': nid(i), 'target': nid(j), 'condition': TRUE.to_dict(), 'depth': 1}
        for i, j in edges
    ]
def cover_dicts(*edges: Tuple[int, int]) -> List[Dict[str, Any]]:
    """Serialize (source, target) index pairs as cover dicts.

    Covers use the same wire format as edges, so delegate to edge_dicts()
    instead of duplicating the dict construction (the original body was a
    verbatim copy of edge_dicts).
    """
    return edge_dicts(*edges)
class KCFGTestCase(TestCase):
    """Unit tests for KCFG deserialization, mutation and traversal."""

    def test_from_dict_single_node(self):
        # Given
        d = {'nodes': node_dicts(1)}
        # When
        cfg = KCFG.from_dict(d)
        # Then
        self.assertSetEqual(set(cfg.nodes), {node(0)})
        self.assertDictEqual(cfg.to_dict(), d)

    def test_from_dict_two_nodes(self):
        # Given
        d = {'nodes': node_dicts(2)}
        # When
        cfg = KCFG.from_dict(d)
        # Then
        self.assertSetEqual(set(cfg.nodes), {node(0), node(1)})

    def test_from_dict_loop_edge(self):
        # Given
        d = {'nodes': node_dicts(1), 'edges': edge_dicts((0, 0))}
        # When
        cfg = KCFG.from_dict(d)
        # Then
        self.assertSetEqual(set(cfg.nodes), {node(0)})
        self.assertSetEqual(set(cfg.edges()), {edge(0, 0)})
        self.assertEqual(cfg.edge(nid(0), nid(0)), edge(0, 0))
        self.assertDictEqual(cfg.to_dict(), d)

    def test_from_dict_simple_edge(self):
        # Given
        d = {'nodes': node_dicts(2), 'edges': edge_dicts((0, 1))}
        # When
        cfg = KCFG.from_dict(d)
        # Then
        self.assertSetEqual(set(cfg.nodes), {node(0), node(1)})
        self.assertSetEqual(set(cfg.edges()), {edge(0, 1)})
        self.assertEqual(cfg.edge(nid(0), nid(1)), edge(0, 1))

    def test_create_node(self):
        # Given
        cfg = KCFG()
        # When
        new_node = cfg.create_node(term(0))
        # Then
        self.assertEqual(new_node, node(0))
        self.assertSetEqual(set(cfg.nodes), {node(0)})
        self.assertFalse(cfg.is_expanded(new_node.id))

    def test_remove_unknown_node(self):
        # Given
        cfg = KCFG()
        # Then
        with self.assertRaises(ValueError):
            # When
            cfg.remove_node(nid(0))

    def test_remove_node(self):
        # Given
        d = {'nodes': node_dicts(3), 'edges': edge_dicts((0, 1), (1, 2))}
        cfg = KCFG.from_dict(d)
        cfg.add_expanded(node(0).id)
        cfg.add_expanded(node(1).id)
        # When
        cfg.remove_node(nid(1))
        # Then: the node, its edges and its expanded status are all gone.
        self.assertSetEqual(set(cfg.nodes), {node(0), node(2)})
        self.assertSetEqual(set(cfg.edges()), set())
        self.assertFalse(cfg.is_expanded(nid(0)))
        with self.assertRaises(ValueError):
            cfg.node(nid(1))
        with self.assertRaises(ValueError):
            cfg.edge(nid(0), nid(1))
        with self.assertRaises(ValueError):
            cfg.edge(nid(1), nid(2))

    def test_cover_then_remove(self):
        # Given
        cfg = KCFG()
        # When: node1 (a token) is covered by node2 (a variable).
        node1 = cfg.create_node(CTerm(KApply('<top>', [token(1)])))
        node2 = cfg.create_node(CTerm(KApply('<top>', [KVariable('X')])))
        cover = cfg.create_cover(node1.id, node2.id)
        # Then
        self.assertTrue(cfg.is_covered(node1.id))
        self.assertFalse(cfg.is_covered(node2.id))
        self.assertFalse(cfg.is_expanded(node1.id))
        self.assertFalse(cfg.is_expanded(node2.id))
        self.assertDictEqual(dict(cover.subst), {'X': token(1)})
        self.assertEqual(cfg.covers(), [cover])
        # When
        cfg.remove_cover(node1.id, node2.id)
        # Then
        self.assertFalse(cfg.is_covered(node1.id))
        self.assertFalse(cfg.is_covered(node2.id))
        self.assertFalse(cfg.is_expanded(node1.id))
        self.assertFalse(cfg.is_expanded(node2.id))
        self.assertEqual(cfg.covers(), [])

    def test_insert_loop_edge(self):
        # Given
        d = {'nodes': node_dicts(1)}
        cfg = KCFG.from_dict(d)
        # When
        new_edge = cfg.create_edge(nid(0), nid(0), TRUE, 1)
        # Then
        self.assertEqual(new_edge, edge(0, 0))
        self.assertSetEqual(set(cfg.nodes), {node(0)})
        self.assertSetEqual(set(cfg.edges()), {edge(0, 0)})
        self.assertEqual(cfg.edge(nid(0), nid(0)), edge(0, 0))

    def test_insert_simple_edge(self):
        # Given
        d = {'nodes': node_dicts(2)}
        cfg = KCFG.from_dict(d)
        # When
        new_edge = cfg.create_edge(nid(0), nid(1), TRUE, 1)
        # Then
        self.assertEqual(new_edge, edge(0, 1))
        self.assertSetEqual(set(cfg.nodes), {node(0), node(1)})
        self.assertSetEqual(set(cfg.edges()), {edge(0, 1)})

    def test_get_successors(self):
        d = {'nodes': node_dicts(3), 'edges': edge_dicts((0, 1), (0, 2))}
        cfg = KCFG.from_dict(d)
        # When
        succs = set(cfg.edges(source_id=nid(0)))
        # Then
        self.assertSetEqual(succs, {edge(0, 1), edge(0, 2)})

    def test_get_predecessors(self):
        d = {'nodes': node_dicts(3), 'edges': edge_dicts((0, 2), (1, 2))}
        cfg = KCFG.from_dict(d)
        # When
        preds = set(cfg.edges(target_id=nid(2)))
        # Then
        self.assertSetEqual(preds, {edge(0, 2), edge(1, 2)})

    def test_reachable_nodes(self):
        # Given: a graph with a cycle (1->2/3->4->1) and a cover 4 ~> 11.
        d = {
            'nodes': node_dicts(12),
            'edges': edge_dicts((0, 1), (0, 5), (0, 11), (1, 2), (1, 3), (2, 4), (3, 4), (4, 1)),
            'covers': cover_dicts((4, 11)),
        }
        cfg = KCFG.from_dict(d)
        # When
        nodes_1 = cfg.reachable_nodes(nid(1))
        nodes_2 = cfg.reachable_nodes(nid(1), traverse_covers=True)
        # Then: node 11 is reachable only when covers are traversed.
        self.assertSetEqual(nodes_1, {node(1), node(2), node(3), node(4)})
        self.assertSetEqual(nodes_2, {node(1), node(2), node(3), node(4), node(11)})

    def test_paths_between(self):
        # Given
        d = {
            'nodes': node_dicts(4),
            'edges': edge_dicts((0, 1), (0, 2), (1, 2), (1, 3), (2, 3), (3, 0)),
        }
        cfg = KCFG.from_dict(d)
        # When
        paths = set(cfg.paths_between(nid(0), nid(3)))
        # Then: all three acyclic 0->3 paths, none revisiting node 0.
        self.assertSetEqual(
            paths,
            {
                (edge(0, 1), edge(1, 3)),
                (edge(0, 2), edge(2, 3)),
                (edge(0, 1), edge(1, 2), edge(2, 3)),
            },
        )
| 28.301205 | 97 | 0.548744 | 5,964 | 0.846318 | 0 | 0 | 0 | 0 | 0 | 0 | 518 | 0.073506 |
931f8afd4a5774ac2d04ce2cc9a73a4ae7812abb | 645 | py | Python | tests/test_unit/test_graph/test_tesserae.py | karljohanw/cortexpy | 70dcce771136f98edb5250ad8abd2a46bda7f0a6 | [
"Apache-2.0"
] | null | null | null | tests/test_unit/test_graph/test_tesserae.py | karljohanw/cortexpy | 70dcce771136f98edb5250ad8abd2a46bda7f0a6 | [
"Apache-2.0"
] | null | null | null | tests/test_unit/test_graph/test_tesserae.py | karljohanw/cortexpy | 70dcce771136f98edb5250ad8abd2a46bda7f0a6 | [
"Apache-2.0"
] | null | null | null | from cortexpy.tesserae import Tesserae
class TestTesserae:
    """Tests for Tesserae mosaic alignment."""

    def test_mosaic_alignment_on_short_query_and_two_templates(self):
        # given: a query that is a mosaic of a prefix of targets[0] and a
        # suffix of targets[1].
        query = "GTAGGCGAGATGACGCCAT"
        targets = ["GTAGGCGAGTCCCGTTTATA", "CCACAGAAGATGACGCCATT"]
        # when
        t = Tesserae()
        p = t.align(query, targets)
        # then: three alignment rows come back (presumably the query plus one
        # row per contributing template — TODO confirm p[0]'s content).
        # Rows appear to be (name, aligned_sequence, start, end) tuples:
        # query positions 0-6 from template0, 7-18 from template1.
        assert len(p) == 3
        assert p[1][0] == 'template0'
        assert p[1][1] == 'GTAGGCG'
        assert p[1][2] == 0
        assert p[1][3] == 6
        assert p[2][0] == 'template1'
        assert p[2][1] == ' AGATGACGCCAT'
        assert p[2][2] == 7
        assert p[2][3] == 18
| 24.807692 | 69 | 0.541085 | 603 | 0.934884 | 0 | 0 | 0 | 0 | 0 | 0 | 136 | 0.210853 |
93208a4a4fa8500fdf154d105073add88c48b822 | 151 | py | Python | napari_bb_annotations/__init__.py | czbiohub/napari-bb-annotation | c316931de363e41c702e69316933b486d5f38cb2 | [
"BSD-3-Clause"
] | 3 | 2021-03-12T18:13:40.000Z | 2021-04-27T00:22:03.000Z | napari_bb_annotations/__init__.py | czbiohub/napari-bb-annotation | c316931de363e41c702e69316933b486d5f38cb2 | [
"BSD-3-Clause"
] | 13 | 2021-04-05T20:32:08.000Z | 2021-06-23T05:35:37.000Z | napari_bb_annotations/__init__.py | czbiohub/napari-bb-annotations | c316931de363e41c702e69316933b486d5f38cb2 | [
"BSD-3-Clause"
] | null | null | null | try:
from ._version import version as __version__
except ImportError:
__version__ = "unknown"
# Imported solely for its import-time side effects (presumably registering the
# plugin's key bindings — TODO confirm); the name itself is not used.
from . import _key_bindings
del _key_bindings  # drop the name so it does not leak from the package namespace
| 15.1 | 48 | 0.761589 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 | 0.059603 |
9320c7176b7d0f5a4f94419ff900dc0287dd18c3 | 8,544 | py | Python | tests/l_layer/backward.py | felix990302/nnlib | 618c9f860ff2298ed2e0bbcb249ae74eeb8a408b | [
"Apache-2.0"
] | 1 | 2019-03-27T17:02:50.000Z | 2019-03-27T17:02:50.000Z | tests/l_layer/backward.py | felix990302/nnlib | 618c9f860ff2298ed2e0bbcb249ae74eeb8a408b | [
"Apache-2.0"
] | null | null | null | tests/l_layer/backward.py | felix990302/nnlib | 618c9f860ff2298ed2e0bbcb249ae74eeb8a408b | [
"Apache-2.0"
] | null | null | null | import numpy as np
from numpy.random import RandomState
from numpy.testing import assert_allclose
from nnlib.l_layer.backward import linear_backward, linear_backward_activation, model_backward
from nnlib.utils.derivative import sigmoid_backward, relu_backward
from nnlib.utils.activation import sigmoid, relu
def test_linear_backward():
    """linear_backward reproduces recorded gradients for a seeded random cache."""
    rand = RandomState(1)
    dZ = rand.randn(1, 2)
    A = rand.randn(3, 2)
    W = rand.randn(1, 3)
    # Cache layout is (A_prev, dropout_mask, W); mask 1 means no dropout.
    dA_prev, dW, db = linear_backward(dZ, (A, 1, W), alpha=0, keep_prob=1)
    assert_allclose(dA_prev, [
        [0.51822968, -0.19517421],
        [-0.40506361, 0.15255393],
        [2.37496825, -0.89445391]])
    assert_allclose(dW, [[-0.10076895, 1.40685096, 1.64992505]])
    assert_allclose(db, [[0.50629448]])
def test_linear_backward_activation_sigmoid():
    """Backward pass through a sigmoid-activated linear layer matches recorded gradients."""
    rand = RandomState(2)
    dA = rand.randn(1, 2)
    A = rand.randn(3, 2)
    W = rand.randn(1, 3)
    # Unused draw — presumably kept so the RNG stream stays aligned with the
    # recorded expected values; do not remove.
    _ = rand.randn(1, 1) # noqa: F841
    Z = rand.randn(1, 2)
    dA_prev, dW, db = linear_backward_activation(dA, ((A, 1, W), (Z, sigmoid(Z))), sigmoid_backward, alpha=0, keep_prob=1)
    assert_allclose(dA_prev, np.array([
        [0.11017994, 0.01105339],
        [0.09466817, 0.00949723],
        [-0.05743092, -0.00576154]]), rtol=1e-05)
    assert_allclose(dW, np.array([[0.10266786, 0.09778551, -0.01968084]]), rtol=1e-05)
    assert_allclose(db, np.array([[-0.05729622]]), rtol=1e-05)
def test_linear_backward_activation_relu():
    """Backward pass through a ReLU-activated linear layer matches recorded gradients."""
    rand = RandomState(2)
    dA = rand.randn(1, 2)
    A = rand.randn(3, 2)
    W = rand.randn(1, 3)
    # Unused draw — presumably kept so the RNG stream stays aligned with the
    # recorded expected values; do not remove.
    _ = rand.randn(1, 1) # noqa: F841
    Z = rand.randn(1, 2)
    dA_prev, dW, db = linear_backward_activation(dA, ((A, 1, W), (Z, relu(Z))), relu_backward, alpha=0, keep_prob=1)
    assert_allclose(dA_prev, np.array([
        [0.44090989, 0.],
        [0.37883606, 0.],
        [-0.2298228, 0.]]), rtol=1e-05)
    assert_allclose(dW, np.array([[0.44513824, 0.37371418, -0.10478989]]), rtol=1e-05)
    assert_allclose(db, np.array([[-0.20837892]]), rtol=1e-05)
def test_model_backward():
    """Full-model backward pass (2 layers, no regularization/dropout) matches recorded gradients."""
    rand = RandomState(3)
    AL = rand.randn(1, 2)
    Y = np.array([[1, 0]])
    X = rand.randn(4, 2)
    W1 = rand.randn(3, 4)
    b1 = rand.randn(3, 1)
    Z1 = rand.randn(3, 2)
    A1 = rand.randn(3, 2)
    W2 = rand.randn(1, 3)
    b2 = rand.randn(1, 1)
    Z2 = rand.randn(1, 2)
    parameters = dict(
        W={1: W1, 2: W2},
        b={1: b1, 2: b2}
    )
    # D entries of 1 mean "no dropout mask" for the corresponding layer.
    caches = dict(
        Z={1: Z1, 2: Z2},
        A={0: X, 1: A1, 2: sigmoid(Z2)},
        D={0: 1, 1: 1}
    )
    grads = model_backward(AL, Y, parameters, caches, alpha=0, keep_prob=1)
    assert_allclose(
        grads["dW"][1],
        np.array([
            [0.41010002, 0.07807203, 0.13798444, 0.10502167],
            [0., 0., 0., 0.],
            [0.05283652, 0.01005865, 0.01777766, 0.0135308]]),
        rtol=1e-05
    )
    assert_allclose(
        grads["db"][1],
        np.array([
            [-0.22007063],
            [0.],
            [-0.02835349]])
    )
    assert_allclose(
        grads["dA"][1],
        np.array([
            [0.12913162, -0.44014127],
            [-0.14175655, 0.48317296],
            [0.01663708, -0.05670698]]),
        rtol=1e-05
    )
def test_model_backward_l2_regularization():
    """3-layer backward pass with L2 regularization (alpha=0.7) matches recorded dW values."""
    random_state = RandomState(1)
    X = random_state.randn(3, 5)
    Y = np.array([[1, 1, 0, 1, 0]])
    # Pre-recorded forward-pass cache: (Z1, A1, W1, b1, Z2, A2, W2, b2, Z3, A3, W3, b3).
    cache = (
        np.array([[-1.52855314, 3.32524635, 2.13994541, 2.60700654, -0.75942115],
                  [-1.98043538, 4.1600994, 0.79051021, 1.46493512, -0.45506242]]),
        np.array([[0., 3.32524635, 2.13994541, 2.60700654, 0.],
                  [0., 4.1600994, 0.79051021, 1.46493512, 0.]]),
        np.array([[-1.09989127, -0.17242821, -0.87785842],
                  [0.04221375, 0.58281521, -1.10061918]]),
        np.array([[1.14472371],
                  [0.90159072]]),
        np.array([[0.53035547, 5.94892323, 2.31780174, 3.16005701, 0.53035547],
                  [-0.69166075, -3.47645987, -2.25194702, -2.65416996, -0.69166075],
                  [-0.39675353, -4.62285846, -2.61101729, -3.22874921, -0.39675353]]),
        np.array([[0.53035547, 5.94892323, 2.31780174, 3.16005701, 0.53035547],
                  [0., 0., 0., 0., 0.],
                  [0., 0., 0., 0., 0.]]),
        np.array([[0.50249434, 0.90085595],
                  [-0.68372786, -0.12289023],
                  [-0.93576943, -0.26788808]]),
        np.array([[0.53035547],
                  [-0.69166075],
                  [-0.39675353]]),
        np.array(
            [[-0.3771104, -4.10060224, -1.60539468, -2.18416951, -0.3771104]]),
        np.array(
            [[0.40682402, 0.01629284, 0.16722898, 0.10118111, 0.40682402]]),
        np.array([[-0.6871727, -0.84520564, -0.67124613]]),
        np.array([[-0.0126646]])
    )
    # The cached A3 slot is discarded; AL is recomputed from Z3 below.
    Z1, A1, W1, b1, Z2, A2, W2, b2, Z3, _, W3, b3 = cache
    parameters = dict(
        W={1: W1, 2: W2, 3: W3},
        b={1: b1, 2: b2, 3: b3}
    )
    caches = dict(
        Z={1: Z1, 2: Z2, 3: Z3},
        A={0: X, 1: A1, 2: A2, 3: sigmoid(Z3)},
        D={0: 1, 1: 1, 2: 1}
    )
    AL = caches["A"][3]
    grads = model_backward(AL, Y, parameters, caches, alpha=0.7, keep_prob=1)
    dW1 = np.array([[-0.25604646, 0.12298827, - 0.28297129],
                    [-0.17706303, 0.34536094, - 0.4410571]])
    dW2 = np.array([[0.79276486, 0.85133918],
                    [-0.0957219, - 0.01720463],
                    [-0.13100772, - 0.03750433]])
    dW3 = np.array([[-1.77691347, - 0.11832879, - 0.09397446]])
    assert_allclose(grads['dW'][1], dW1)
    assert_allclose(grads['dW'][2], dW2, rtol=1e-05)
    assert_allclose(grads['dW'][3], dW3)
def test_model_backward_dropout():
    """3-layer backward pass with dropout (keep_prob=0.8) matches recorded dA values."""
    random_state = RandomState(1)
    X = random_state.randn(3, 5)
    Y = np.array([[1, 1, 0, 1, 0]])
    # Pre-recorded forward-pass cache including boolean dropout masks D1/D2:
    # (Z1, D1, A1, W1, b1, Z2, D2, A2, W2, b2, Z3, A3, W3, b3).
    cache = (
        np.array([[-1.52855314, 3.32524635, 2.13994541, 2.60700654, -0.75942115],
                  [-1.98043538, 4.1600994, 0.79051021, 1.46493512, -0.45506242]]),
        np.array([[True, False, True, True, True],
                  [True, True, True, True, False]],
                 dtype=bool),
        np.array([[0., 0., 4.27989081, 5.21401307, 0.],
                  [0., 8.32019881, 1.58102041, 2.92987024, 0.]]),
        np.array([[-1.09989127, -0.17242821, -0.87785842],
                  [0.04221375, 0.58281521, -1.10061918]]),
        np.array([[1.14472371],
                  [0.90159072]]),
        np.array([[0.53035547, 8.02565606, 4.10524802, 5.78975856, 0.53035547],
                  [-0.69166075, -1.71413186, -3.81223329, -4.61667916, -0.69166075],
                  [-0.39675353, -2.62563561, -4.82528105, -6.0607449, -0.39675353]]),
        np.array([[True, False, True, False, True],
                  [False, True, False, True, True],
                  [False, False, True, False, False]],
                 dtype=bool),
        np.array([[1.06071093, 0., 8.21049603, 0., 1.06071093],
                  [0., 0., 0., 0., 0.],
                  [0., 0., 0., 0., 0.]]),
        np.array([[0.50249434, 0.90085595],
                  [-0.68372786, -0.12289023],
                  [-0.93576943, -0.26788808]]),
        np.array([[0.53035547],
                  [-0.69166075],
                  [-0.39675353]]),
        np.array([[-0.7415562, -0.0126646, -5.65469333, -0.0126646, -0.7415562]]),
        np.array([[0.32266394, 0.49683389, 0.00348883, 0.49683389, 0.32266394]]),
        np.array([[-0.6871727, -0.84520564, -0.67124613]]),
        np.array([[-0.0126646]])
    )
    Z1, D1, A1, W1, b1, Z2, D2, A2, W2, b2, Z3, A3, W3, b3 = cache
    parameters = dict(
        W={1: W1, 2: W2, 3: W3},
        b={1: b1, 2: b2, 3: b3}
    )
    # Dropout masks are attached to the cache so backprop can re-mask dA1/dA2.
    caches = dict(
        Z={1: Z1, 2: Z2, 3: Z3},
        A={0: X, 1: A1, 2: A2, 3: sigmoid(Z3)},
        D={0: 1, 1: D1, 2: D2}
    )
    grads = model_backward(A3, Y, parameters, caches, alpha=0, keep_prob=0.8)
    dA1 = np.array([[0.36544439, 0., -0.00188233, 0., -0.17408748],
                    [0.65515713, 0., -0.00337459, 0., -0.]])
    dA2 = np.array([[0.58180856, 0., -0.00299679, 0., -0.27715731],
                    [0., 0.53159854, -0., 0.53159854, -0.34089673],
                    [0., 0., -0.00292733, 0., -0., ]])
    assert_allclose(grads['dA'][1], dA1, rtol=1e-05)
    assert_allclose(grads['dA'][2], dA2, rtol=1e-05)
93220f73e54d4401d09974fa1a394a20787d5fd3 | 137 | py | Python | src/news_lk/upload_data.py | nuuuwan/news_lk | a2e60fd2dfb49f7c5e8d432cdd5fad0511575197 | [
"MIT"
] | null | null | null | src/news_lk/upload_data.py | nuuuwan/news_lk | a2e60fd2dfb49f7c5e8d432cdd5fad0511575197 | [
"MIT"
] | null | null | null | src/news_lk/upload_data.py | nuuuwan/news_lk | a2e60fd2dfb49f7c5e8d432cdd5fad0511575197 | [
"MIT"
] | null | null | null | """Uploaded data to nuuuwan/news_lk:data branch."""
from news_lk import scrape
if __name__ == '__main__':
scrape.scrape_and_dump()
| 19.571429 | 51 | 0.729927 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 61 | 0.445255 |
932270b633249ca330baa6276f57f5c2b0d91f48 | 1,814 | py | Python | deli_counter/http/mounts/root/routes/v1/validation_models/regions.py | sandwichcloud/deli-counter | e72ee3966f4457ffbd1d432b3516965075c7c86e | [
"MIT"
] | 1 | 2017-10-19T09:25:29.000Z | 2017-10-19T09:25:29.000Z | deli_counter/http/mounts/root/routes/v1/validation_models/regions.py | sandwichcloud/deli-counter | e72ee3966f4457ffbd1d432b3516965075c7c86e | [
"MIT"
] | null | null | null | deli_counter/http/mounts/root/routes/v1/validation_models/regions.py | sandwichcloud/deli-counter | e72ee3966f4457ffbd1d432b3516965075c7c86e | [
"MIT"
] | null | null | null | from schematics import Model
from schematics.types import IntType, UUIDType, StringType, BooleanType
from ingredients_db.models.region import RegionState, Region
from ingredients_http.schematics.types import ArrowType, EnumType
class RequestCreateRegion(Model):
    """Request body for creating a region."""
    name = StringType(required=True, min_length=3)
    datacenter = StringType(required=True)
    image_datastore = StringType(required=True)
    image_folder = StringType()
class ParamsRegion(Model):
    """Path/query parameters identifying a single region."""
    region_id = UUIDType(required=True)
class ParamsListRegion(Model):
    """Query parameters for listing regions (optional name filter, paging via limit/marker)."""
    name = StringType()
    limit = IntType(default=100, max_value=100, min_value=1)
    marker = UUIDType()
class RequestRegionSchedule(Model):
    """Request body toggling whether a region accepts scheduling."""
    schedulable = BooleanType(required=True)
class ResponseRegion(Model):
    """API response representation of a region."""
    id = UUIDType(required=True)
    name = StringType(required=True, min_length=3)
    datacenter = StringType(required=True, )
    image_datastore = StringType(required=True)
    image_folder = StringType()
    schedulable = BooleanType(required=True)
    state = EnumType(RegionState, required=True)
    current_task_id = UUIDType()
    created_at = ArrowType(required=True)
    updated_at = ArrowType(required=True)

    @classmethod
    def from_database(cls, region: Region):
        """Build a ResponseRegion by copying fields from an ingredients_db Region row."""
        region_model = cls()
        region_model.id = region.id
        region_model.name = region.name
        region_model.datacenter = region.datacenter
        region_model.image_datastore = region.image_datastore
        region_model.image_folder = region.image_folder
        region_model.schedulable = region.schedulable
        region_model.state = region.state
        region_model.current_task_id = region.current_task_id
        region_model.created_at = region.created_at
        region_model.updated_at = region.updated_at
        return region_model
| 31.275862 | 71 | 0.738699 | 1,570 | 0.865491 | 0 | 0 | 624 | 0.343991 | 0 | 0 | 0 | 0 |
932632d6c40f6be38b39dba6166b3aebd0e53a0b | 257 | py | Python | winremoteenum.py | simondotsh/WinRemoteEnum | f40cff76788709e54e7ed19157870dacfcaa0bc3 | [
"MIT"
] | 2 | 2021-11-25T14:15:12.000Z | 2022-02-02T04:27:17.000Z | winremoteenum.py | simondotsh/WinRemoteEnum | f40cff76788709e54e7ed19157870dacfcaa0bc3 | [
"MIT"
] | null | null | null | winremoteenum.py | simondotsh/WinRemoteEnum | f40cff76788709e54e7ed19157870dacfcaa0bc3 | [
"MIT"
] | 2 | 2021-11-03T18:11:40.000Z | 2022-02-02T15:11:30.000Z | #!/usr/bin/env python3
from src.cli import Cli
from src.core import Orchestrator
def main():
    """Parse and validate CLI arguments, then dispatch the selected enumeration modules."""
    config, args = Cli.parse_and_validate()
    Orchestrator.launch_modules(config, args.modules, args.targets, args.audit)
if __name__ == '__main__':
main() | 23.363636 | 79 | 0.727626 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 32 | 0.124514 |
93280c05a831eacbb39a47e2e89a3ee261d3a78f | 1,696 | py | Python | setup.py | easyScience/easyCore | 5d16d5b27803277d0c44886f94dab599f764ae0b | [
"BSD-3-Clause"
] | 2 | 2021-11-02T10:22:45.000Z | 2022-02-18T23:41:19.000Z | setup.py | easyScience/easyCore | 5d16d5b27803277d0c44886f94dab599f764ae0b | [
"BSD-3-Clause"
] | 114 | 2020-06-30T08:52:27.000Z | 2022-03-30T20:47:56.000Z | setup.py | easyScience/easyCore | 5d16d5b27803277d0c44886f94dab599f764ae0b | [
"BSD-3-Clause"
] | 1 | 2022-03-04T13:01:09.000Z | 2022-03-04T13:01:09.000Z |
# -*- coding: utf-8 -*-
# DO NOT EDIT THIS FILE!
# This file has been autogenerated by dephell <3
# https://github.com/dephell/dephell
# Fall back to distutils when setuptools is unavailable.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
import os.path
# Use README.rst (if present, decoded as UTF-8) as the long description.
readme = ''
here = os.path.abspath(os.path.dirname(__file__))
readme_path = os.path.join(here, 'README.rst')
if os.path.exists(readme_path):
    with open(readme_path, 'rb') as stream:
        readme = stream.read().decode('utf8')
setup(
    long_description=readme,
    name='easyScienceCore',
    version='0.1.0',
    description='Generic logic for easyScience libraries',
    python_requires='==3.*,>=3.7.0',
    project_urls={"documentation": "https://github.com/easyScience/easyCore", "homepage": "https://github.com/easyScience/easyCore"},
    author='Simon Ward',
    # NOTE(review): license is declared GPL-3.0 here, but the classifier below
    # declares LGPLv3+ — confirm which license actually applies.
    license='GPL-3.0',
    classifiers=['Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Scientific/Engineering :: Physics', 'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)', 'Programming Language :: Python :: 3 :: Only'],
    packages=['easyCore', 'easyCore.Datasets', 'easyCore.Elements', 'easyCore.Elements.Basic', 'easyCore.Elements.HigherLevel', 'easyCore.Fitting', 'easyCore.Objects', 'easyCore.Symmetry', 'easyCore.Utils', 'easyCore.Utils.Hugger', 'easyCore.Utils.io'],
    package_dir={"": "."},
    package_data={"easyCore.Elements": ["*.json"], "easyCore.Symmetry": ["*.json"]},
    install_requires=['asteval==0.*,>=0.9.23', 'bumps==0.*,>=0.8.0', 'dfo-ls==1.*,>=1.2.0', 'lmfit==1.*,>=1.0.0', 'numpy==1.*,>=1.19.0', 'pint==0.*,>=0.16.0', 'uncertainties==3.*,>=3.1.0', 'xarray==0.*,>=0.16.0'],
)
| 43.487179 | 266 | 0.661557 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,053 | 0.620873 |
9328e23e8d93546efc5261e03fba860e2483f556 | 1,505 | py | Python | submit_scripts/mizuRoute/mizuRoute_wrapper.py | BrisClimate/flood-cascade | 660c29275a87785153d0f107ed23104fcbcbddee | [
"MIT"
] | null | null | null | submit_scripts/mizuRoute/mizuRoute_wrapper.py | BrisClimate/flood-cascade | 660c29275a87785153d0f107ed23104fcbcbddee | [
"MIT"
] | null | null | null | submit_scripts/mizuRoute/mizuRoute_wrapper.py | BrisClimate/flood-cascade | 660c29275a87785153d0f107ed23104fcbcbddee | [
"MIT"
] | 3 | 2020-11-08T16:01:47.000Z | 2021-01-13T17:13:32.000Z | #!/cm/shared/languages/python-3.3.2/bin/python
# submit script for submission of mizuRoute simualtions
# Peter Uhe Oct 29 2019
#
# call this script from 'run_mizuRoute_templated_mswep050calib.py which creates a qsub job to submit to the HPC queue
# This script is actually called from 'call_pythonscript.sh' (which is needed to load modules before calling the script)
import os,glob,subprocess,sys,shutil,multiprocessing
import datetime
def call_subproc(cmd, logfile):
    """Run *cmd*, capturing stdout and stderr into *logfile*.

    :param cmd: command as an argv-style list for subprocess.
    :param logfile: path of the log file to (over)write.

    Fix: the original passed ``open(logfile, 'w')`` inline, leaking the
    file handle; a ``with`` block guarantees it is closed even if the
    subprocess call raises.
    """
    with open(logfile, 'w') as log:
        subprocess.call(cmd, stdout=log, stderr=subprocess.STDOUT)
# Print start time
print('Starting:', datetime.datetime.now())

# Get environment variables (all required except as noted); set by the
# submitting qsub job script.
control_files = os.environ['CONTROL_FLIST'].split(':')  # ':'-separated control file paths
logdir = os.environ['LOGDIR']        # directory for per-simulation log files
ncpus = int(os.environ['NCPUS'])     # size of the worker pool
mizuexe = os.environ['MIZU_EXE']     # path to the mizuRoute executable

print('running simulations', len(control_files))
print(os.environ['CONTROL_FLIST'])

# Fan the simulations out over a process pool; each worker runs the
# executable with one control file and writes its own log.
pool = multiprocessing.Pool(processes=ncpus)
for control_file in control_files:
    # Todo, could add check if this simulation has already been run
    fname = os.path.basename(control_file)
    # NOTE(review): assumes control file names look like 'control_<sim>.txt'
    # (8-char prefix, 4-char suffix) -- confirm against the generator script.
    sim_name = fname[8:-4]
    logfile = os.path.join(logdir, sim_name + '.log')
    cmd = ['time', mizuexe, control_file]
    print('command', cmd)
    print('log', logfile)
    # ret = pool.apply_async(subprocess.call,cmd,{'stdout':open(logfile,'w') ,'stderr':subprocess.STDOUT})
    # subprocess.call(cmd,stdout=open(logfile,'w'),stderr=subprocess.STDOUT)
    ret = pool.apply_async(call_subproc, [cmd, logfile])
pool.close()
pool.join()
print('Finished:', datetime.datetime.now())
| 35 | 120 | 0.765449 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 772 | 0.512957 |
932c68f3bfdd0fa4f25517ebea01555ab01db520 | 17,968 | py | Python | txml.py | jdelgit/txml | 07c28962a34397b546d56c1adb60c0ef20f71c22 | [
"MIT"
] | null | null | null | txml.py | jdelgit/txml | 07c28962a34397b546d56c1adb60c0ef20f71c22 | [
"MIT"
] | null | null | null | txml.py | jdelgit/txml | 07c28962a34397b546d56c1adb60c0ef20f71c22 | [
"MIT"
] | null | null | null | from xml.etree.ElementTree import iterparse, ParseError
from io import StringIO
from os.path import isfile
from re import findall
class XmlParser:
    """Streaming XML reader built on ``xml.etree``'s ``iterparse``.

    Accepts either a path to an existing .xml/.xsd file or a raw
    XML-formatted string, and exposes generator-based search helpers
    that yield matching nodes (optionally with their nested children)
    as plain dictionaries.
    """

    def __init__(self, source=""):
        # source: file path or XML string; _source_check decides which.
        self.source = source
        self.proces_file = False   # True once the input has been validated
        self.use_io = False        # True when source is a string (read via StringIO)
        self.encoding = 'UTF-8'    # overwritten by _get_encoding when declared
        self.namespaces = {}
        self.namespace_present = False
        self._source_check()

    # see also _get_encoding, _get_namespaces
    def _source_check(self):
        """Check whether the source is an existing XML file or an
        XML-style formatted string.

        Side effects: sets ``proces_file``/``use_io`` and triggers
        encoding and namespace detection.  Invalid input is reported
        with ``print`` rather than raising.
        """
        _extension = self.source[-3:]
        if _extension == "xml" or _extension == "xsd":
            if isfile(self.source):
                self.proces_file = True
                self._get_encoding()
                self._get_namespaces()
            else:
                print("File not found {}".format(self.source))
        else:
            # Not a file path: probe the string itself for well-formed XML
            # by pulling one event from iterparse.
            context_test = iterparse(StringIO("""{}""".format(self.source)))
            try:
                context_test.__next__()
                del context_test
                self.proces_file = True
                self.use_io = True
                self._get_encoding()
                self._get_namespaces()
            except ParseError:
                del context_test
                print("Input is not in supported Xml format")

    def _get_encoding(self):
        """Read the encoding attribute from the file's XML declaration.

        Only applies to file input; string input keeps the UTF-8 default.
        """
        if self.proces_file and not self.use_io:
            with open(self.source, 'r') as f:
                l = f.readline()
            if 'encoding' in l:
                # NOTE(review): non-raw regex -- '\?' works because Python
                # passes unknown escapes through, but a raw string would be
                # safer.
                match = findall('(encoding=.*\?)', l)
                encoding = match[0].split('=')[1].replace(
                    '?', '').replace('\"', '')
                self.encoding = encoding

    # see also get_all_tags
    def _get_namespaces(self):
        """Build a dictionary mapping each ``{namespace}`` prefix to the
        list of local tag names seen under it.

        Side effects: sets ``self.namespaces`` and ``namespace_present``.
        """
        tags = self.get_all_tags()
        namespaces = {}
        for tag in tags:
            # ElementTree spells namespaced tags as '{uri}local'.
            namespace = findall('({.{1,}})', tag)
            if len(namespace) > 0:
                namespace = namespace[0]
                formatted_tag = tag.replace(namespace, '')
                try:
                    namespaces[namespace].append(formatted_tag)
                except KeyError:
                    namespaces[namespace] = [formatted_tag]
        if namespaces:
            self.namespace_present = True
        self.namespaces = namespaces
        # return namespaces

    def get_all_tags(self):
        """Return a list of all unique tags in the XML.

        No hierarchy is maintained; returns None (after printing) when
        no valid source is available.
        """
        if self.source and self.proces_file:
            if self.use_io:
                context = iterparse(StringIO("""{}""".format(self.source)),
                                    events=("start",))
            else:
                data = open(self.source, 'r', encoding=self.encoding)
                context = iterparse(data, events=("start",))
        else:
            print("No source XML-file provided")
            return
        tag_set = []
        for event, elem in context:
            tag_set.append(elem.tag)
            elem.clear()  # free element memory as we stream
        if self.source and self.proces_file and not self.use_io:
            data.close()  # close filestream
        del context
        tag_set = list(set(tag_set))
        return tag_set

    # see also search_nodes
    def search_namespace_node(self, namespace="", tag=""):
        """Yield nodes whose tag is ``{namespace}tag`` (see search_nodes)."""
        ntag = "{}{}".format(namespace, tag)
        for node in self.search_nodes(tag=ntag):
            yield node

    # see also search_node_attr
    def search_namespace_attr(self, namespace="", tag="", **kwargs):
        """Yield namespaced nodes filtered by attribute values
        (see search_node_attr)."""
        ntag = "{}{}".format(namespace, tag)
        for node in self.search_node_attr(tag=ntag, kwargs=kwargs):
            yield node

    # see also seach_nodes
    def search_node_attr(self, tag="", get_children=True, **kwargs):
        """Filter results from ``search_nodes`` by attribute values.

        :param tag: tag of the XML node element.
        :param get_children: whether subnodes should be returned as well.
        :param kwargs: attribute name -> required value pairs; a node is
            yielded only if every pair matches.
        """
        # search_namespace_attr forwards its filters under a single
        # 'kwargs' key; unwrap that form here.
        if 'kwargs' in kwargs:
            kwargs = kwargs['kwargs']
        for node in self.search_nodes(tag=tag, get_children=get_children):
            if len(kwargs) > 0:
                for key in kwargs:
                    arg = kwargs[key]
                    try:
                        node_val = node['element']['attr'][key]
                    except KeyError:
                        # Missing attribute: treat as empty so the node
                        # simply fails to match (do not raise).
                        node_val = ''
                    if node_val == arg:
                        give_node = True
                    else:
                        # attribute not matching
                        # move on to next node
                        give_node = False
                        break
            else:
                give_node = True
            if give_node:
                yield node

    # see also _node_to_dict and _stack_state_controller
    def search_nodes(self, tag="", get_children=True):
        """Yield every element with a matching tag as a dictionary.

        If ``tag`` is empty the first tag seen (the root) is used.  When
        ``get_children`` is set, each yielded dict also carries its nested
        subnodes as a list of dictionaries under 'children'.

        :param tag: tag of the XML node element.
        :param get_children: whether subnodes should be collected.
        """
        if self.source and self.proces_file:
            if self.use_io:
                context = iterparse(StringIO("""{}""".format(self.source)),
                                    events=('start', 'end'))
            else:
                data = open(self.source, 'r', encoding=self.encoding)
                context = iterparse(data, events=('start', 'end'))
        else:
            print("Unable to process input")
            return
        if get_children:
            # State carried across events for the child-collecting
            # stack machine in _stack_state_controller.
            children = []
            p_stack = []
            tag_stack = []
            p_tag = ""
            c_tag = ""
            npd = False
        append_children = False
        for event, elem in context:
            if not tag:
                # if no tag is given then get data for entire document
                tag = elem.tag
            if get_children:
                # While inside a matching node, feed every non-matching
                # event through the stack controller to build the
                # nested 'children' structure.
                if elem.tag != tag and append_children:
                    event, elem, p_tag, c_tag, p_stack, \
                        tag_stack, children, npd = \
                        self._stack_state_controller(event=event,
                                                     elem=elem,
                                                     p_tag=p_tag,
                                                     c_tag=c_tag,
                                                     p_stack=p_stack,
                                                     tag_stack=tag_stack,
                                                     children=children,
                                                     npd=npd)
            if elem.tag == tag and event == 'start':
                append_children = True
            if elem.tag == tag and event == 'end':
                node_dict = self._node_to_dict(elem)
                output_dict = {'element': node_dict, 'children': []}
                elem.clear()
                if get_children:
                    output_dict['children'] = children
                    children = []
                append_children = False
                yield output_dict
        del context
        if self.source and self.proces_file and not self.use_io:
            data.close()  # close filestream
            del data

    # see node_to_dict
    def _stack_state_controller(self, event, elem, p_tag="", c_tag="",
                                p_stack=[], tag_stack=[], children=[],
                                npd=False):
        """Advance the parent/child stacks for one iterparse event.

        Tracks a dictionary stack (parents under construction) and a tag
        stack, updating them on the start/end triggers of each element so
        that nested children end up attached to the right parent dicts.

        :param event: 'start' or 'end' for the current element.
        :param elem: the ElementTree element for this event.
        :param p_tag: current parent tag (top of dict stack; not used actively).
        :param c_tag: current child tag (top of tag stack).
        :param p_stack: stack of parent dictionaries under construction.
        :param tag_stack: stack of open tags.
        :param children: accumulator of completed top-level subnodes.
        :param npd: when set, a new parent dict is pushed on the next start.
        :returns: all arguments, updated, as a list.

        NOTE(review): the mutable default arguments are shared across
        calls by Python semantics; search_nodes always passes explicit
        state, so the defaults are effectively unused -- confirm before
        calling this directly.
        """
        # npd controls the creation of new dicts in the p_stack
        if (elem.tag != c_tag) and (event == "start"):
            tag_stack.append(elem.tag)
            if npd:
                # add new dictionary when children are confirmed to exist
                _p_dict = {'children': [], 'element': ""}
                p_stack.append(_p_dict)
            p_tag = c_tag
            c_tag = elem.tag
            npd = True
        elif (elem.tag == c_tag) and (event == "end"):
            if len(tag_stack) == 1:
                # last child on stack: attach it to the output list
                if len(p_stack) > 0:
                    # child has children
                    _child = p_stack.pop()
                    _child['element'] = self._node_to_dict(elem)
                else:
                    _child = {'children': [],
                              'element': self._node_to_dict(elem)}
                children.append(_child)
                c_tag = ""
                tag_stack.pop()
            elif len(tag_stack) == len(p_stack):
                # closing element owns the top dict; fold it into the
                # dict below it (its parent)
                _child = p_stack.pop()
                _parent = p_stack.pop()
                _child['element'] = self._node_to_dict(elem)
                _parent['children'].append(_child)
                p_stack.append(_parent)
                tag_stack.pop()
                c_tag = tag_stack[-1]
                if len(tag_stack) > 1:
                    p_tag = tag_stack[-2]
                else:
                    p_tag = ""
            else:
                # leaf element: attach it directly to the current parent
                _parent = p_stack.pop()
                _child = self._node_to_dict(elem)
                _parent['children'].append(_child)
                p_stack.append(_parent)
                tag_stack.pop()
                c_tag = tag_stack[-1]
                if len(tag_stack) > 1:
                    p_tag = tag_stack[-2]
                else:
                    p_tag = ""
            npd = False
            elem.clear()
        return [event, elem, p_tag, c_tag, p_stack,
                tag_stack, children, npd]

    def _node_to_dict(self, node=""):
        """Convert one element's attributes, text and tag to a dict.

        :param node: an ElementTree element.
        :returns: dict with 'attr' (attribute name -> value), 'text'
            and 'tag' keys.
        """
        data = {}
        data['attr'] = {n[0]: n[1] for n in node.items()}
        data['text'] = node.text
        data['tag'] = node.tag
        return data
class XsdtoDict:
    """Convert an XSD schema (file or string) into a nested dictionary.

    Walks the node dictionaries produced by ``XmlParser`` and extracts
    complexType/simpleType definitions with their sequences, elements,
    restrictions and enumerations.
    """

    def __init__(self, source=''):
        # source: path to an .xsd file or a schema string (see XmlParser).
        self.source = source

    def convert_to_dict(self):
        """Parse the schema and return {type name: parsed definition}."""
        parser = XmlParser(source=self.source)
        xsd_tags = self.get_export_type_data(parser)
        data = {}
        for tag in xsd_tags:
            data[tag['name']] = self.parse_xml_entry(tag, parser)
        return data

    def get_export_type_data(self, validation_parser):
        """List the top-level schema children as {'name', 'tag'} dicts.

        NOTE(review): if search_nodes yields several non-empty results,
        each iteration overwrites output_types -- only the last root's
        children survive; confirm that a schema has a single root here.
        """
        all_nodes = validation_parser.search_nodes()
        output_types = []
        for nodes in all_nodes:
            if nodes:
                output_types = [{'name': entry['element']['attr']['name'],
                                 'tag': entry['element']['tag']}
                                for entry in nodes['children']]
        return output_types

    def parse_xml_entry(self, tag_data, xml_iterator):
        """Dispatch one named schema entry to the matching parser.

        complexType -> parse_complextypes, simpleType -> parse_simpletypes,
        anything else -> the raw node list.
        """
        parent_tag = tag_data['tag']
        parent_name = tag_data['name']
        sub_elements = xml_iterator.search_node_attr(tag=parent_tag,
                                                     name=parent_name)
        if 'complexType' in parent_tag:
            output = self.parse_complextypes(sub_elements)
        elif 'simpleType' in parent_tag:
            output = self.parse_simpletypes(sub_elements)
        else:
            output = list(sub_elements)
        return output

    def parse_complextypes(self, complex_iterator):
        """Extract the attributes and <sequence> contents of a complexType."""
        output = {}
        for element_data in complex_iterator:
            output['attr'] = element_data['element']['attr']
            output['sequence'] = []
            if element_data['children']:
                for sub_element in element_data['children']:
                    if 'sequence' in sub_element['element']['tag']:
                        sequence_data = self.parse_sequence(sub_element['children'])
                        output['sequence'].append(sequence_data)
                    else:
                        # non-sequence children are ignored
                        pass
        return output

    def parse_sequence(self, sequence_elements):
        """Parse every element of a <sequence> via parse_element."""
        sequence_output = []
        for element in sequence_elements:
            element_data = self.parse_element(element)
            sequence_output.append(element_data)
        return sequence_output

    def parse_element(self, element):
        """Parse one <element>, descending into inline simpleType children.

        Handles both node shapes produced by XmlParser: dicts with a
        nested 'element'/'children' pair, and flat attr/tag dicts.
        """
        output = {}
        if 'children' in element:
            output['tag'] = element['element']['tag']
            output['attr'] = element['element']['attr']
            element_children = element['children']
            output['children_data'] = []
            for child in element_children:
                if 'simpleType' in child['element']['tag']:
                    child_data = self.parse_simpletypes(child)
                    output['children_data'].append(child_data)
        else:
            output['tag'] = element['tag']
            output['attr'] = element['attr']
        return output

    def parse_simpletypes(self, simple_element):
        """Extract the <restriction> information of a simpleType.

        Accepts either a single node dict or an iterator of them; the
        TypeError branch handles the iterator (generator) form, where
        subscription fails.
        """
        output = {}
        try:
            element_children = simple_element['children']
            for child_element in element_children:
                if 'restriction' in child_element['element']['tag']:
                    output['restrictions'] = {'attr': child_element['element']['attr']}
                    restriction_data = self.parse_restrictions(child_element['children'])
                    output['restrictions']['restrictions'] = restriction_data
        except TypeError:
            # simple_element is a generator of nodes: take the first node
            # and drill down to its restriction children.
            element_data = list(simple_element)
            element_data = element_data[0]
            element_children = element_data['children']
            element_children = element_children[0]['children']
            output['restrictions'] = []
            for data in element_children:
                if 'element' in data:
                    output['restrictions'].append(data['element']['attr'])
                else:
                    if 'minLength' in data['tag']:
                        output['restrictions'].append({'minlength': data['attr']})
                    if 'maxLength' in data['tag']:
                        output['restrictions'].append({'maxlength': data['attr']})
        return output

    def parse_restrictions(self, restriction_iterator):
        """Collect enumeration and length facets from restriction children."""
        output = []
        for restriction in restriction_iterator:
            restriction_data = {}
            restriction_data['enumarations'] = []
            restriction_data['length_data'] = []
            if 'element' in restriction:
                if 'enumeration' in restriction['element']['tag']:
                    enumaration_data = self.parse_enumarations(restriction['children'])
                    restriction_data['enumarations'].append(enumaration_data)
                    restriction_data['attr'] = restriction['element']['attr']
                elif 'Length' in restriction['element']['tag']:
                    # covers minLength / maxLength facets
                    restriction_data['attr'] = restriction['element']['attr']
                    restriction_data['length_data'].append(restriction['element']['attr'])
            else:
                restriction_data['attr'] = restriction['attr']
            output.append(restriction_data)
        return output

    def parse_enumarations(self, enumeration_iterator):
        """Collect <annotation>/<documentation> data for an enumeration."""
        output = {'annotations': ""}
        for enumaration in enumeration_iterator:
            if 'annotation' in enumaration['element']['tag']:
                annotations = enumaration['children']
                annot = {'documentation': []}
                for annotation in annotations:
                    # NOTE(review): ['text'] is a literal one-item list --
                    # this looks like it was meant to be annotation['text'];
                    # confirm before relying on the documentation text.
                    annot['documentation'].append({'attr': annotation['attr'],
                                                   'text': ['text']})
                output['annotations'] = annot
        return output
| 37.668763 | 105 | 0.509016 | 17,832 | 0.992431 | 4,728 | 0.263134 | 0 | 0 | 0 | 0 | 4,922 | 0.273931 |
932d1dd3f28fbe11836dbddd059b224fbf55e0a8 | 1,404 | py | Python | app/orders.py | Gabkings/fast-food-api1 | 129bea30cfa0ecb017f2f26bb34a6aa2716f9f04 | [
"MIT"
] | null | null | null | app/orders.py | Gabkings/fast-food-api1 | 129bea30cfa0ecb017f2f26bb34a6aa2716f9f04 | [
"MIT"
] | null | null | null | app/orders.py | Gabkings/fast-food-api1 | 129bea30cfa0ecb017f2f26bb34a6aa2716f9f04 | [
"MIT"
] | null | null | null | from flask import Flask, request
from flask_restful import Resource
from .models import Order, orders
class OrderDetals(Resource):
    """REST resource for a single food order, addressed by its id.

    (The class name keeps its original spelling so existing route
    registrations continue to resolve.)
    """

    def get(self, id):
        """Return the details of one order, or 404 if it does not exist."""
        order = Order().get_order_by_id(id)
        if not order:
            return {"message": "Order not found"}, 404
        return {"order": order.collect_order_details()}, 200

    def delete(self, id):
        """Remove one order from the store, or 404 if it does not exist."""
        order = Order().get_order_by_id(id)
        if not order:
            return {"message": "Order not found"}, 404
        orders.remove(order)
        return {"message": "order deleted successfully"}, 200

    def put(self, id):
        """Approve one order, or 404 if it does not exist.

        Fixes two defects in the original:
        * it called the non-existent ``get_by_id`` instead of the
          ``get_order_by_id`` used by the sibling methods;
        * the existence check was inverted (``if order:`` returned 404
          for *found* orders and then crashed with AttributeError on
          ``None`` for missing ones).
        """
        order = Order().get_order_by_id(id)
        if not order:
            return {"message": "Order not found"}, 404
        order.status = "approved"
        return {"message": "status approved"}
class NewOrderPlacement(Resource):
    """REST resource that creates a new food order from a JSON payload."""

    def post(self):
        """Build an Order from the request JSON and append it to the store."""
        payload = request.get_json()
        created = Order(payload['name'], payload["price"],
                        payload['no_of_items_ordered'])
        orders.append(created)
        return {"message": "Food order created"}, 201
class DisplayAllOrders(Resource):
    """REST resource that lists every stored order."""

    def get(self):
        """Return the collected details of all orders."""
        all_details = [entry.collect_order_details() for entry in orders]
        return {"orders": all_details}
| 25.527273 | 78 | 0.58547 | 1,276 | 0.908832 | 0 | 0 | 0 | 0 | 0 | 0 | 364 | 0.259259 |
932e05da095fad449ca578a15a6dc14375d17137 | 5,198 | py | Python | python/storyboard/bias_optimizer.py | stanford-futuredata/sketchstore | c209e4d01343a05dc5aecdb7a9801fc639019fd3 | [
"Apache-2.0"
] | 5 | 2020-05-07T02:25:01.000Z | 2021-05-25T07:54:42.000Z | python/storyboard/bias_optimizer.py | stanford-futuredata/sketchstore | c209e4d01343a05dc5aecdb7a9801fc639019fd3 | [
"Apache-2.0"
] | 1 | 2020-07-31T20:04:59.000Z | 2020-07-31T20:04:59.000Z | python/storyboard/bias_optimizer.py | stanford-futuredata/sketchstore | c209e4d01343a05dc5aecdb7a9801fc639019fd3 | [
"Apache-2.0"
] | 1 | 2020-07-06T20:39:05.000Z | 2020-07-06T20:39:05.000Z | from typing import Mapping, Any, Sequence
import numpy as np
import heapq
import math
from tqdm import tqdm
import scipy.optimize
import cvxpy as cvx
def n_bias(x_count: np.ndarray, bias: float):
    """Total mass of ``x_count`` remaining after subtracting ``bias``
    from every entry and flooring at zero."""
    excess = np.clip(x_count - bias, a_min=0, a_max=None)
    return excess.sum()
def cost(bs, ns):
    """Objective value: (sum of biases)^2 plus a quarter of the sum of
    squared adjusted sizes."""
    bias_term = np.sum(bs) ** 2
    size_term = (1 / 4) * np.sum(ns ** 2)
    return bias_term + size_term
def opt_cvx(
        x_counts: Sequence[np.ndarray],
        sizes: Sequence[int],
        n_iter: int=10
) -> np.ndarray:
    """Solve for per-sketch biases with a convex program (cvxpy/ECOS).

    Minimizes 4*(sum of biases)^2 + sum over sketches of
    (remaining mass / size)^2, with biases constrained non-negative.

    :param x_counts: per-sketch count arrays.
    :param sizes: per-sketch size budgets (same length as x_counts).
    :param n_iter: unused here; kept for signature parity with the
        iterative optimizers.
    :returns: rounded bias values, one per sketch.
    """
    n = len(sizes)
    Bs = cvx.Variable(n)
    constraints = [
        Bs >= 0
    ]
    # Sum of squared adjusted sizes: pos(x - b) is the clipped residual
    # mass, mirroring n_bias.
    term2 = 0
    for i in range(n):
        x_count = x_counts[i]
        size = sizes[i]
        term2 += cvx.square(cvx.sum(cvx.pos(x_count - Bs[i])) / size)
    o = cvx.Minimize(
        4 * cvx.square(cvx.sum(Bs)) + term2
    )
    prob = cvx.Problem(o, constraints)
    sol = prob.solve(solver=cvx.ECOS)
    b_values = Bs.value
    # Report the cost of the solution using the shared cost() metric.
    n_adj = np.zeros(n)
    for i in range(n):
        n_adj[i] = n_bias(x_counts[i], b_values[i]) / sizes[i]
    print("Cost: {}".format(cost(b_values, n_adj)))
    return np.round(b_values)
def n_deriv(x_count, bias, nraw=1, s=1):
    """Scaled count of entries at or above the bias threshold
    (derivative-style sensitivity used to prioritize the heap)."""
    count_above = np.sum(x_count >= bias)
    return nraw / s ** 2 * count_above
base = 2.0


def convert_to_bs(b_pows):
    """Map exponents to bias values: ``base ** p``, with any result
    below 1 flattened to 0.  Works on scalars and numpy arrays."""
    biases = base ** b_pows
    if isinstance(biases, np.ndarray):
        biases[biases < 1] = 0
    elif biases < 1:
        biases = 0
    return biases
def opt_sequence_2(
        x_counts: Sequence[np.ndarray],
        sizes: Sequence[int],
        n_iter: int=10
) -> np.ndarray:
    """Greedy coordinate descent on continuous per-sketch biases.

    A max-heap (negated priorities) picks the sketch whose bias change
    most affects the objective; that coordinate is then optimized with
    scipy's scalar minimizer while all others are held fixed.

    :param x_counts: per-sketch count arrays.
    :param sizes: per-sketch size budgets.
    :param n_iter: number of coordinate updates.
    :returns: the bias vector after n_iter updates.
    """
    n = len(x_counts)
    bs = np.zeros(n)
    # Adjusted size per sketch: remaining mass after bias, over budget.
    n_adj = np.zeros(n)
    for i in range(n):
        n_adj[i] = n_bias(x_counts[i], bs[i]) / sizes[i]
    # Priority queue keyed on -n_deriv so the most sensitive sketch pops first.
    pq = []
    for s_idx in range(len(x_counts)):
        n_raw = n_adj[s_idx] * sizes[s_idx]
        heapq.heappush(
            pq,
            (-n_deriv(x_counts[s_idx], bs[s_idx], nraw=n_raw, s=sizes[s_idx]), s_idx)
        )
    print("Optimizing Bias")
    for cur_iter in tqdm(range(n_iter)):
        _, opt_idx = heapq.heappop(pq)
        # opt_idx = cur_iter % 3
        # print("bs:{}".format(bs))
        # print("ns:{}".format(n_adj))
        # print("cost: {}".format(old_cost))
        old_cost = cost(bs, n_adj)

        def cost_b_fun(b):
            # Objective as a function of this coordinate only.
            new_bs = bs.copy()
            new_adj = n_adj.copy()
            new_bs[opt_idx] = b
            new_adj[opt_idx] = n_bias(x_counts[opt_idx], b) / sizes[opt_idx]
            return cost(new_bs, new_adj)

        # Bias can never usefully exceed the mean count mass per slot.
        max_b = np.sum(x_counts[opt_idx]) / sizes[opt_idx]
        bracket = None
        if bs[opt_idx] > 0:
            bracket = (0, bs[opt_idx], max_b)
        res = scipy.optimize.minimize_scalar(
            cost_b_fun,
            bracket=bracket,
            bounds=(0, max_b),
            tol=0.1
        )
        best_b = res.x
        print("best b: {}".format(best_b))
        new_cost = res.fun
        print("Old Cost: {}".format(old_cost))
        print("New Cost: {}".format(new_cost))
        # if (new_cost > old_cost*.98):
        #     break
        # Commit the coordinate update and requeue with a fresh priority.
        bs[opt_idx] = best_b
        n_adj[opt_idx] = n_bias(x_counts[opt_idx], bs[opt_idx]) / sizes[opt_idx]
        n_raw = n_adj[opt_idx] * sizes[opt_idx]
        heapq.heappush(
            pq,
            (-n_deriv(x_counts[opt_idx], bs[opt_idx], nraw=n_raw, s=sizes[opt_idx]), opt_idx)
        )
        print("Heap: {}".format(pq))
    return bs
def opt_sequence(
        x_counts: Sequence[np.ndarray],
        sizes: Sequence[int],
        n_iter: int=10
) -> np.ndarray:
    """Greedy coordinate descent over power-of-``base`` bias levels.

    Biases are restricted to base**p (or 0); each step moves one
    sketch's exponent by -1/0/+1, keeping whichever shift lowers the
    shared cost() most.  Stops early when keeping the exponent
    unchanged is already best.

    :param x_counts: per-sketch count arrays.
    :param sizes: per-sketch size budgets.
    :param n_iter: maximum number of exponent updates.
    """
    n = len(x_counts)
    b_pows = np.zeros(n) - 1        # exponent -1 maps to bias 0 via convert_to_bs
    bs = convert_to_bs(b_pows)
    n_adj = np.zeros(n)
    for i in range(n):
        n_adj[i] = n_bias(x_counts[i], bs[i]) / sizes[i]
    # Max-heap (negated n_deriv) selects the most sensitive sketch.
    pq = []
    for s_idx in range(len(x_counts)):
        heapq.heappush(
            pq,
            (-n_deriv(x_counts[s_idx], bs[s_idx]), s_idx)
        )
    shifts = np.array([-1, 0, 1])
    print("Optimizing Bias")
    for cur_iter in tqdm(range(n_iter)):
        _, opt_idx = heapq.heappop(pq)
        # print("bs:{}".format(bs))
        # print("ns:{}".format(n_adj))
        # print("cost: {}".format(old_cost))
        # Evaluate the cost of each candidate exponent shift in place.
        new_costs = np.zeros(3)
        for shift_idx, cur_shift in enumerate(shifts):
            cur_b_pow = b_pows[opt_idx] + cur_shift
            bs[opt_idx] = convert_to_bs(cur_b_pow)
            # bs[opt_idx] = math.floor(2.0 ** cur_b_pow)
            n_adj[opt_idx] = n_bias(x_counts[opt_idx], bs[opt_idx]) / sizes[opt_idx]
            new_costs[shift_idx] = cost(bs, n_adj)
        # print("i:{},b:{},deltas:{}".format(opt_idx, cur_b_pow, new_costs - old_cost))
        best_shift_idx = np.argmin(new_costs)
        print("New Cost: {}".format(new_costs[best_shift_idx]))
        # Commit the best shift and restore consistent bs / n_adj state.
        b_pows[opt_idx] += shifts[best_shift_idx]
        # bs[opt_idx] = math.floor(2.0 ** b_pows[opt_idx])
        bs[opt_idx] = convert_to_bs(b_pows[opt_idx])
        n_adj[opt_idx] = n_bias(x_counts[opt_idx], bs[opt_idx]) / sizes[opt_idx]
        if shifts[best_shift_idx] == 0:
            # No shift improves this coordinate: converged.
            break
        heapq.heappush(
            pq,
            (-n_deriv(x_counts[opt_idx], bs[opt_idx]), opt_idx)
        )
return bs | 29.039106 | 93 | 0.55329 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 624 | 0.120046 |
932e5914cfad78eadac43b5ef29f6ed1ab78376e | 874 | py | Python | config/conf.d/04-spawner-common.py | possiblyMikeB/davidson-jupyter | 8980c1249756d421b70950b9895b75e16d7e68c0 | [
"BSD-3-Clause"
] | null | null | null | config/conf.d/04-spawner-common.py | possiblyMikeB/davidson-jupyter | 8980c1249756d421b70950b9895b75e16d7e68c0 | [
"BSD-3-Clause"
] | null | null | null | config/conf.d/04-spawner-common.py | possiblyMikeB/davidson-jupyter | 8980c1249756d421b70950b9895b75e16d7e68c0 | [
"BSD-3-Clause"
] | null | null | null | import json, os
## base spawner config
# NOTE(review): `c` (the JupyterHub config object) and `hub_id` are injected
# by the surrounding JupyterHub configuration loader -- they are not defined
# in this snippet; confirm against the file that execs these conf.d parts.
try:
    # Prefer a JSON-encoded command list supplied via the environment.
    c.Spawner.cmd = \
        json.loads(os.environ['SPAWNER_CMD'])
except KeyError:
    c.Spawner.cmd = [
        'jupyterhub-singleuser',  # OAuth wrapped jupyter instance server
        '--KernelManager.transport=ipc',  # -- all kernel comms over UNIX sockets
        '--MappingKernelManager.cull_idle_timeout=0'  # -- no kernel culling
    ]

c.Spawner.http_timeout = int(os.environ.get('SPAWNER_HTTP_TIMEOUT', '20'))  # grace period for spawner connect back
c.Spawner.default_url = os.environ.get('SPAWNER_DEFAULT_URL', '/lab')  # default route to visit once spawned

# set jupyter instance base directory (relative to $HOME)
if hub_id.lower() in {'jupyter', 'public', 'pub'}:
    c.Spawner.notebook_dir = ''
else:
    # restrict to context specific notebook path
    c.Spawner.notebook_dir = f'Workspace/{hub_id}'
    pass
| 33.615385 | 114 | 0.693364 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 508 | 0.581236 |
932fbbd3b2489556c428736a9418ed424b87b707 | 339 | py | Python | datasets/__init__.py | radarsat1/latentspace | e651dc14ed7b4dd3f28577639a06b7160c5fca5e | [
"MIT"
] | null | null | null | datasets/__init__.py | radarsat1/latentspace | e651dc14ed7b4dd3f28577639a06b7160c5fca5e | [
"MIT"
] | null | null | null | datasets/__init__.py | radarsat1/latentspace | e651dc14ed7b4dd3f28577639a06b7160c5fca5e | [
"MIT"
] | null | null | null |
__all__ = ['get_dataset']
def get_dataset(params):
    """Instantiate the dataset selected by ``params['name']``.

    :param params: dict with at least a 'name' key; the whole dict is
        forwarded to the chosen Dataset constructor.
    :raises AssertionError: for an unknown dataset name.
    """
    name = params['name']
    if name == 'multimodal_points':
        from datasets.multimodal_gaussian_2d import Dataset
        return Dataset(params)
    elif name == 'kicks':
        from datasets.kicks import Dataset
        return Dataset(params)
    # Fix: `assert False and 'Unknown dataset'` produced a message-less
    # AssertionError and silently returned None under `python -O`;
    # raise explicitly with the offending name instead.
    raise AssertionError('Unknown dataset: {}'.format(name))
| 28.25 | 59 | 0.675516 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 68 | 0.20059 |
932fd6103c8719c4b707f1914eddb51377cb4198 | 8,419 | py | Python | memsource_cli/models/async_request_dto.py | unofficial-memsource/memsource-cli-client | a6639506b74e95476da87f4375953448b76ea90c | [
"Apache-2.0"
] | 16 | 2019-09-25T00:20:38.000Z | 2021-05-04T05:56:10.000Z | memsource_cli/models/async_request_dto.py | zerodayz/memsource-cli-client | c2574f1467539a49e6637c874e88d75c7ef789b3 | [
"Apache-2.0"
] | 26 | 2019-09-30T14:00:03.000Z | 2021-05-12T11:15:18.000Z | memsource_cli/models/async_request_dto.py | zerodayz/memsource-cli-client | c2574f1467539a49e6637c874e88d75c7ef789b3 | [
"Apache-2.0"
] | 1 | 2021-05-24T16:19:14.000Z | 2021-05-24T16:19:14.000Z | # coding: utf-8
"""
Memsource REST API
Welcome to Memsource's API documentation. To view our legacy APIs please [visit our documentation](https://wiki.memsource.com/wiki/Memsource_API) and for more information about our new APIs, [visit our blog](https://www.memsource.com/blog/2017/10/24/introducing-rest-apis-qa-with-the-memsource-api-team/). If you have any questions, please contact [Memsource Support](<mailto:support@memsource.com>). # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
# from memsource_cli.models.async_request_dto import AsyncRequestDto
from memsource_cli.models.async_response_dto import AsyncResponseDto # noqa: F401,E501
from memsource_cli.models.project_reference import ProjectReference # noqa: F401,E501
from memsource_cli.models.user_reference import UserReference # noqa: F401,E501
class AsyncRequestDto(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'id': 'str',
'created_by': 'UserReference',
'date_created': 'datetime',
'action': 'str',
'async_response': 'AsyncResponseDto',
'parent': 'AsyncRequestDto',
'project': 'ProjectReference'
}
attribute_map = {
'id': 'id',
'created_by': 'createdBy',
'date_created': 'dateCreated',
'action': 'action',
'async_response': 'asyncResponse',
'parent': 'parent',
'project': 'project'
}
def __init__(self, id=None, created_by=None, date_created=None, action=None, async_response=None, parent=None, project=None): # noqa: E501
"""AsyncRequestDto - a model defined in Swagger""" # noqa: E501
self._id = None
self._created_by = None
self._date_created = None
self._action = None
self._async_response = None
self._parent = None
self._project = None
self.discriminator = None
if id is not None:
self.id = id
if created_by is not None:
self.created_by = created_by
if date_created is not None:
self.date_created = date_created
if action is not None:
self.action = action
if async_response is not None:
self.async_response = async_response
if parent is not None:
self.parent = parent
if project is not None:
self.project = project
@property
def id(self):
"""Gets the id of this AsyncRequestDto. # noqa: E501
:return: The id of this AsyncRequestDto. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this AsyncRequestDto.
:param id: The id of this AsyncRequestDto. # noqa: E501
:type: str
"""
self._id = id
@property
def created_by(self):
"""Gets the created_by of this AsyncRequestDto. # noqa: E501
:return: The created_by of this AsyncRequestDto. # noqa: E501
:rtype: UserReference
"""
return self._created_by
@created_by.setter
def created_by(self, created_by):
"""Sets the created_by of this AsyncRequestDto.
:param created_by: The created_by of this AsyncRequestDto. # noqa: E501
:type: UserReference
"""
self._created_by = created_by
@property
def date_created(self):
"""Gets the date_created of this AsyncRequestDto. # noqa: E501
:return: The date_created of this AsyncRequestDto. # noqa: E501
:rtype: datetime
"""
return self._date_created
@date_created.setter
def date_created(self, date_created):
"""Sets the date_created of this AsyncRequestDto.
:param date_created: The date_created of this AsyncRequestDto. # noqa: E501
:type: datetime
"""
self._date_created = date_created
@property
def action(self):
"""Gets the action of this AsyncRequestDto. # noqa: E501
:return: The action of this AsyncRequestDto. # noqa: E501
:rtype: str
"""
return self._action
@action.setter
def action(self, action):
"""Sets the action of this AsyncRequestDto.
:param action: The action of this AsyncRequestDto. # noqa: E501
:type: str
"""
allowed_values = ["PRE_ANALYSE", "POST_ANALYSE", "COMPARE_ANALYSE", "PRE_TRANSLATE", "ASYNC_TRANSLATE", "IMPORT_JOB", "IMPORT_FILE", "ALIGN", "EXPORT_TMX_BY_QUERY", "IMPORT_TMX", "INSERT_INTO_TM", "QA", "UPDATE_CONTINUOUS_JOB"] # noqa: E501
if action not in allowed_values:
raise ValueError(
"Invalid value for `action` ({0}), must be one of {1}" # noqa: E501
.format(action, allowed_values)
)
self._action = action
@property
def async_response(self):
"""Gets the async_response of this AsyncRequestDto. # noqa: E501
:return: The async_response of this AsyncRequestDto. # noqa: E501
:rtype: AsyncResponseDto
"""
return self._async_response
@async_response.setter
def async_response(self, async_response):
"""Sets the async_response of this AsyncRequestDto.
:param async_response: The async_response of this AsyncRequestDto. # noqa: E501
:type: AsyncResponseDto
"""
self._async_response = async_response
@property
def parent(self):
"""Gets the parent of this AsyncRequestDto. # noqa: E501
:return: The parent of this AsyncRequestDto. # noqa: E501
:rtype: AsyncRequestDto
"""
return self._parent
@parent.setter
def parent(self, parent):
"""Sets the parent of this AsyncRequestDto.
:param parent: The parent of this AsyncRequestDto. # noqa: E501
:type: AsyncRequestDto
"""
self._parent = parent
@property
def project(self):
"""Gets the project of this AsyncRequestDto. # noqa: E501
:return: The project of this AsyncRequestDto. # noqa: E501
:rtype: ProjectReference
"""
return self._project
@project.setter
def project(self, project):
"""Sets the project of this AsyncRequestDto.
:param project: The project of this AsyncRequestDto. # noqa: E501
:type: ProjectReference
"""
self._project = project
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(AsyncRequestDto, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
    def __repr__(self):
        """Delegate `print` and `pprint` output to :meth:`to_str`."""
        return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, AsyncRequestDto):
return False
return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal (negation of `==`)."""
        return not self == other
| 29.749117 | 421 | 0.602328 | 7,459 | 0.885972 | 0 | 0 | 3,967 | 0.471196 | 0 | 0 | 4,302 | 0.510987 |
932ff6498e2e97f5e213cd233e1d1ac08b819528 | 188 | py | Python | python/p.py | gmasching/project-euler | b4afa94f83756f3a44667ec2d7fcb5bca4304825 | [
"MIT"
] | 2 | 2020-06-03T20:55:27.000Z | 2021-04-17T10:10:17.000Z | python/p.py | gmasching/project-euler | b4afa94f83756f3a44667ec2d7fcb5bca4304825 | [
"MIT"
] | null | null | null | python/p.py | gmasching/project-euler | b4afa94f83756f3a44667ec2d7fcb5bca4304825 | [
"MIT"
] | 2 | 2020-06-03T20:55:33.000Z | 2021-04-17T10:10:47.000Z | def f(x):
#return 1*x**3 + 5*x**2 - 2*x - 24
#return 1*x**4 - 4*x**3 - 2*x**2 + 12*x - 3
return 82*x + 6*x**2 - 0.67*x**3
# Report the change of f over the interval [1, 2]; earlier experiments kept below.
delta = f(2) - f(1)
print(delta)
#print((f(3.5) - f(0.5)) / -3)
#print(f(0.5))
93319b806e1474fb346a2ce001af51f8482adee0 | 19,194 | py | Python | Inference_Model.py | CODEJIN/WaveRNN | 835462e71fd151c8ca0a0ed68f691089f1637851 | [
"MIT"
] | 1 | 2020-03-22T16:55:53.000Z | 2020-03-22T16:55:53.000Z | Inference_Model.py | CODEJIN/WaveRNN | 835462e71fd151c8ca0a0ed68f691089f1637851 | [
"MIT"
] | null | null | null | Inference_Model.py | CODEJIN/WaveRNN | 835462e71fd151c8ca0a0ed68f691089f1637851 | [
"MIT"
] | 1 | 2020-12-12T14:41:50.000Z | 2020-12-12T14:41:50.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import itertools
import threading
import numpy as np
from six.moves import zip # pylint: disable=redefined-builtin
from google.protobuf import json_format
from tensorflow.core.framework import node_def_pb2
from tensorflow.python.autograph.core import ag_ctx
from tensorflow.python.autograph.impl import api as autograph
from tensorflow.python.distribute import distribution_strategy_context as ds_context
from tensorflow.python.eager import context
from tensorflow.python.eager import execute
from tensorflow.python.eager import function
from tensorflow.python.eager import monitoring
from tensorflow.python.framework import auto_control_deps
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import func_graph
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_spec
from tensorflow.python.framework import tensor_util
from tensorflow.python.keras import backend
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
from tensorflow.python.keras.engine import base_layer_utils
from tensorflow.python.keras.engine import input_spec
from tensorflow.python.keras.engine import node as node_module
from tensorflow.python.keras.mixed_precision.experimental import autocast_variable
from tensorflow.python.keras.mixed_precision.experimental import policy
from tensorflow.python.keras.saving.saved_model import layer_serialization
from tensorflow.python.keras.utils import generic_utils
from tensorflow.python.keras.utils import tf_utils
# A module that only depends on `keras.layers` import these from here.
from tensorflow.python.keras.utils.generic_utils import to_snake_case # pylint: disable=unused-import
from tensorflow.python.keras.utils.tf_utils import is_tensor_or_tensor_list # pylint: disable=unused-import
from tensorflow.python.module import module
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variables as tf_variables
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.platform import tf_logging
from tensorflow.python.training.tracking import base as trackable
from tensorflow.python.training.tracking import data_structures
from tensorflow.python.training.tracking import layer_utils as trackable_layer_utils
from tensorflow.python.training.tracking import tracking
from tensorflow.python.util import compat
from tensorflow.python.util import deprecation
from tensorflow.python.util import nest
from tensorflow.python.util import object_identity
from tensorflow.python.util import tf_inspect
from tensorflow.python.util.tf_export import keras_export
from tensorflow.tools.docs import doc_controls
import tensorflow as tf
class Keras_Model_for_Inference(tf.keras.Model):
    '''Keras Model whose inference path sidesteps an autograph limitation.

    This class exists to work around an issue where a 'tf.Tensor'
    cannot be used in 'tf.cond' or 'tf.while_loop' during inference
    due to an autograph problem. The `inference` method is a clone of
    `__call__` of 'tf.keras.layers.Layer' that dispatches to
    `_inference`, which subclasses must implement.
    '''
    def __init__(self, *args, **kwargs):  # pylint: disable=super-init-not-called
        # NOTE(review): a bare `super()` is a no-op -- the parent
        # `tf.keras.Model.__init__` is deliberately not called here (see the
        # pylint disable). Presumably initialization happens elsewhere;
        # confirm before instantiating this class directly.
        super()
    def inference(self, inputs, *args, **kwargs):
        """Run the layer's inference path on `inputs`.

        Clone of `tf.keras.layers.Layer.__call__` with one change: it
        dispatches to `self._inference` instead of `self.call`, so
        subclasses can provide a dedicated inference computation. All of
        the framework plumbing (mask/training propagation, Keras history,
        graph vs. eager handling, autocasting) is preserved verbatim.

        :param inputs: Input tensor(s); nested structures are allowed.
        :param args: Extra positional arguments forwarded to `_inference`.
        :param kwargs: Extra keyword arguments forwarded to `_inference`
            (may include `training` and `mask`).
        :return: Output tensor(s) produced by `_inference`.
        """
        call_context = base_layer_utils.call_context()
        input_list = nest.flatten(inputs)
        # We will attempt to build a TF graph if & only if all inputs are symbolic.
        # This is always the case in graph mode. It can also be the case in eager
        # mode when all inputs can be traced back to `keras.Input()` (when building
        # models using the functional API).
        build_graph = tf_utils.are_all_symbolic_tensors(input_list)
        # Accept NumPy and scalar inputs by converting to Tensors.
        if any(isinstance(x, (np.ndarray, float, int)) for x in input_list):
            def _convert_non_tensor(x):
                # Don't call `ops.convert_to_tensor` on all `inputs` because
                # `SparseTensors` can't be converted to `Tensor`.
                if isinstance(x, (np.ndarray, float, int)):
                    return ops.convert_to_tensor(x)
                return x
            inputs = nest.map_structure(_convert_non_tensor, inputs)
            input_list = nest.flatten(inputs)
        # Handle `mask` propagation from previous layer to current layer. Masks can
        # be propagated explicitly via the `mask` argument, or implicitly via
        # setting the `_keras_mask` attribute on the inputs to a Layer. Masks passed
        # explicitly take priority.
        mask_arg_passed_by_framework = False
        input_masks = self._collect_input_masks(inputs, args, kwargs)
        if (self._expects_mask_arg and input_masks is not None and
                not self._call_arg_was_passed('mask', args, kwargs)):
            mask_arg_passed_by_framework = True
            kwargs['mask'] = input_masks
        # If `training` argument was not explicitly passed, propagate `training`
        # value from this layer's calling layer.
        training_arg_passed_by_framework = False
        # Priority 1: `training` was explicitly passed.
        if self._call_arg_was_passed('training', args, kwargs):
            training_value = self._get_call_arg_value('training', args, kwargs)
            if not self._expects_training_arg:
                kwargs.pop('training')
        else:
            training_value = None
            # Priority 2: `training` was passed to a parent layer.
            if call_context.training is not None:
                training_value = call_context.training
            # Priority 3a: `learning_phase()` has been set.
            elif backend.global_learning_phase_is_set():
                training_value = backend.learning_phase()
            # Priority 3b: Pass the `learning_phase()` if in the Keras FuncGraph.
            elif build_graph:
                with backend.get_graph().as_default():
                    if base_layer_utils.is_in_keras_graph():
                        training_value = backend.learning_phase()
            if self._expects_training_arg and training_value is not None:
                # Force the training_value to be bool type which matches to the contract
                # for layer/model call args.
                if tensor_util.is_tensor(training_value):
                    training_value = math_ops.cast(training_value, dtypes.bool)
                else:
                    training_value = bool(training_value)
                kwargs['training'] = training_value
                training_arg_passed_by_framework = True
        # Only create Keras history if at least one tensor originates from a
        # `keras.Input`. Otherwise this Layer may be being used outside the Keras
        # framework.
        if build_graph and base_layer_utils.needs_keras_history(inputs):
            base_layer_utils.create_keras_history(inputs)
        # Clear eager losses on top level model call.
        # We are clearing the losses only on the top level model call and not on
        # every layer/model call because layer/model may be reused.
        if (base_layer_utils.is_in_eager_or_tf_function() and
                not call_context.in_call):
            self._clear_losses()
        with call_context.enter(self, inputs, build_graph, training_value):
            # Check input assumptions set after layer building, e.g. input shape.
            if build_graph:
                # Symbolic execution on symbolic tensors. We will attempt to build
                # the corresponding TF subgraph inside `backend.get_graph()`
                # TODO(reedwm): We should assert input compatibility after the inputs
                # are casted, not before.
                input_spec.assert_input_compatibility(self.input_spec, inputs,
                                                      self.name)
                if (any(isinstance(x, ragged_tensor.RaggedTensor) for x in input_list)
                        and self._supports_ragged_inputs is False):  # pylint: disable=g-bool-id-comparison
                    raise ValueError('Layer %s does not support RaggedTensors as input. '
                                     'Inputs received: %s. You can try converting your '
                                     'input to an uniform tensor.' % (self.name, inputs))
                graph = backend.get_graph()
                with graph.as_default(), backend.name_scope(self._name_scope()):
                    # Build layer if applicable (if the `build` method has been
                    # overridden).
                    self._maybe_build(inputs)
                    cast_inputs = self._maybe_cast_inputs(inputs)
                    # Wrapping `call` function in autograph to allow for dynamic control
                    # flow and control dependencies in call. We are limiting this to
                    # subclassed layers as autograph is strictly needed only for
                    # subclassed layers and models.
                    # tf_convert will respect the value of autograph setting in the
                    # enclosing tf.function, if any.
                    if (base_layer_utils.is_subclassed(self) and
                            not base_layer_utils.from_saved_model(self)):
                        call_fn = autograph.tf_convert(
                            self._inference, ag_ctx.control_status_ctx())
                    else:
                        call_fn = self._inference
                    if not self.dynamic:
                        try:
                            with base_layer_utils.autocast_context_manager(
                                    self._compute_dtype):
                                # Add auto_control_deps in V2 when they are not already added by
                                # a `tf.function`.
                                if (ops.executing_eagerly_outside_functions() and
                                        not base_layer_utils.is_in_eager_or_tf_function()):
                                    with auto_control_deps.AutomaticControlDependencies() as acd:
                                        outputs = call_fn(cast_inputs, *args, **kwargs)
                                        # Wrap Tensors in `outputs` in `tf.identity` to avoid
                                        # circular dependencies.
                                        outputs = base_layer_utils.mark_as_return(outputs, acd)
                                else:
                                    outputs = call_fn(cast_inputs, *args, **kwargs)
                        except errors.OperatorNotAllowedInGraphError as e:
                            raise TypeError('You are attempting to use Python control '
                                            'flow in a layer that was not declared to be '
                                            'dynamic. Pass `dynamic=True` to the class '
                                            'constructor.\nEncountered error:\n"""\n' +
                                            str(e) + '\n"""')
                    else:
                        # We will use static shape inference to return symbolic tensors
                        # matching the specifications of the layer outputs.
                        # Since `self.dynamic` is True, we will never attempt to
                        # run the underlying TF graph (which is disconnected).
                        # TODO(fchollet): consider py_func as an alternative, which
                        # would enable us to run the underlying graph if needed.
                        outputs = self._symbolic_call(inputs)
                    if outputs is None:
                        raise ValueError('A layer\'s `call` method should return a '
                                         'Tensor or a list of Tensors, not None '
                                         '(layer: ' + self.name + ').')
                    if base_layer_utils.have_all_keras_metadata(inputs):
                        if training_arg_passed_by_framework:
                            kwargs.pop('training')
                        if mask_arg_passed_by_framework:
                            kwargs.pop('mask')
                        inputs, outputs = self._set_connectivity_metadata_(
                            inputs, outputs, args, kwargs)
                    self._handle_activity_regularization(inputs, outputs)
                    self._set_mask_metadata(inputs, outputs, input_masks)
                    if hasattr(self, '_set_inputs') and not self.inputs:
                        # Subclassed network: explicitly set metadata normally set by
                        # a call to self._set_inputs().
                        # TODO(b/120997007): This should be done in Eager as well, but
                        # causes garbage collection issues because of the placeholders
                        # created on the default Keras graph.
                        self._set_inputs(inputs, outputs)
            else:
                # Eager execution on data tensors.
                with backend.name_scope(self._name_scope()):
                    self._maybe_build(inputs)
                    cast_inputs = self._maybe_cast_inputs(inputs)
                    with base_layer_utils.autocast_context_manager(
                            self._compute_dtype):
                        outputs = self._inference(cast_inputs, *args, **kwargs)
                    self._handle_activity_regularization(inputs, outputs)
                    self._set_mask_metadata(inputs, outputs, input_masks)
        return outputs
    def _inference(self, inputs, *args, **kwargs):
        """Inference computation hook; subclasses must override this."""
        raise NotImplementedError
    @trackable.no_automatic_dependency_tracking
    def _init_graph_network(self, inputs, outputs, name=None, **kwargs):
        """Initialise this model as a functional (graph) network.

        Clone of the Keras Network initialiser: normalises `inputs`/`outputs`,
        validates the graph, maps its nodes/layers, and builds the
        input/output bookkeeping structures used for later calls.

        :param inputs: Input tensor(s) originating from `keras.Input`.
        :param outputs: Output tensor(s) computed from `inputs`.
        :param name: Optional model name.
        :param kwargs: Only `trainable` is accepted.
        """
        generic_utils.validate_kwargs(
            kwargs, {'trainable'},
            'Functional models may only specify `name` and `trainable` keyword '
            'arguments during initialization. Got an unexpected argument:')
        # Normalize and set self.inputs, self.outputs.
        if isinstance(inputs, list) and len(nest.flatten(inputs)) == 1:
            inputs = inputs[0]
        if isinstance(outputs, list) and len(nest.flatten(outputs)) == 1:
            outputs = outputs[0]
        self._nested_outputs = outputs
        self._nested_inputs = inputs
        self.inputs = nest.flatten(inputs)
        self.outputs = nest.flatten(outputs)
        if any(not hasattr(tensor, '_keras_history') for tensor in self.outputs):
            base_layer_utils.create_keras_history(self._nested_outputs)
        self._base_init(name=name, **kwargs)
        self._validate_graph_inputs_and_outputs()
        # A Network does not create weights of its own, thus it is already
        # built.
        self.built = True
        self._compute_output_and_mask_jointly = True
        self._is_graph_network = True
        # `_expects_training_arg` is True since the `training` argument is always
        # present in the signature of the `call` method of a graph network.
        self._expects_training_arg = True
        self._expects_mask_arg = True
        # A graph network does not autocast inputs, as its layers will cast them
        # instead.
        self._autocast = False
        self._input_layers = []
        self._output_layers = []
        self._input_coordinates = []
        self._output_coordinates = []
        self._supports_ragged_inputs = None
        # This is for performance optimization when calling the Network on new
        # inputs. Every time the Network is called on a set on input tensors,
        # we compute the output tensors, output masks and output shapes in one pass,
        # then cache them here. When any of these outputs is queried later, we
        # retrieve it from there instead of recomputing it.
        self._output_mask_cache = {}
        self._output_tensor_cache = {}
        self._output_shape_cache = {}
        # Build self._output_layers:
        for x in self.outputs:
            layer, node_index, tensor_index = x._keras_history  # pylint: disable=protected-access
            self._output_layers.append(layer)
            self._output_coordinates.append((layer, node_index, tensor_index))
        # Build self._input_layers:
        for x in self.inputs:
            layer, node_index, tensor_index = x._keras_history  # pylint: disable=protected-access
            # It's supposed to be an input layer, so only one node
            # and one tensor output.
            assert node_index == 0
            assert tensor_index == 0
            self._input_layers.append(layer)
            self._input_coordinates.append((layer, node_index, tensor_index))
        # Keep track of the network's nodes and layers.
        nodes, nodes_by_depth, layers, _ = _map_graph_network(
            self.inputs, self.outputs)
        self._network_nodes = nodes
        self._nodes_by_depth = nodes_by_depth
        self._layers = layers
        self._layer_call_argspecs = {}
        for layer in self._layers:
            self._layer_call_argspecs[layer] = tf_inspect.getfullargspec(layer.call)
            layer._attribute_sentinel.add_parent(self._attribute_sentinel)
        self._track_layers(layers)
        # Create the node linking internal inputs to internal outputs.
        node_module.Node(
            outbound_layer=self,
            inbound_layers=[],
            node_indices=[],
            tensor_indices=[],
            input_tensors=self._nested_inputs,
            output_tensors=self._nested_outputs)
        # Build self.input_names and self.output_names.
        self._set_output_names()
        self.input_names = []
        self._feed_input_names = []
        self._feed_inputs = []
        self._feed_input_shapes = []
        for layer in self._input_layers:
            self.input_names.append(layer.name)
            if layer.is_placeholder:
                self._feed_input_names.append(layer.name)
                # Use batch_input_shape here because non-eager composite tensors may not
                # have a shape attribute that's meaningful (sparse, for instance, has
                # a tensor that's non-constant and needs to be fed). This means that
                # input layers that create placeholders will need to have the
                # batch_input_shape attr to allow for input shape validation.
                self._feed_input_shapes.append(layer._batch_input_shape)
                self._feed_inputs.append(layer.input)
9333f46a3199aee3308e67cf5584433366413e92 | 761 | py | Python | tests/test_net_sender_proxy.py | nicoddemus/aioworkers | 4ab85064844dc28141833d1348989d8c891f3d7d | [
"Apache-2.0"
] | 45 | 2017-04-26T23:50:30.000Z | 2021-12-29T03:21:06.000Z | tests/test_net_sender_proxy.py | nicoddemus/aioworkers | 4ab85064844dc28141833d1348989d8c891f3d7d | [
"Apache-2.0"
] | 63 | 2017-08-01T10:35:45.000Z | 2022-03-01T18:07:49.000Z | tests/test_net_sender_proxy.py | nicoddemus/aioworkers | 4ab85064844dc28141833d1348989d8c891f3d7d | [
"Apache-2.0"
] | 6 | 2017-10-19T08:21:23.000Z | 2021-12-29T03:25:32.000Z | import pytest
@pytest.fixture
def config_yaml():
    """Aioworkers config wiring two sender facades through two queues.

    ``local_sender`` pushes into ``queue1``; ``worker`` drains ``queue1``
    and forwards through ``remote_sender``, which pushes into ``queue2``.
    """
    return """
    local_sender:
        cls: aioworkers.net.sender.proxy.Facade
        queue: queue1
    queue1:
        cls: aioworkers.queue.base.Queue
    worker:
        cls: aioworkers.net.sender.proxy.Worker
        autorun: true
        input: queue1
        sender: remote_sender
    remote_sender:
        cls: aioworkers.net.sender.proxy.Facade
        queue: queue2
    queue2:
        cls: aioworkers.queue.base.Queue
    """
await context.local_sender.send(
to='example@example.com',
subject='test',
content='text',
html='<b>text</b>',
)
msg = await context.queue2.get()
assert msg['subject'] == 'test'
| 20.026316 | 47 | 0.603154 | 0 | 0 | 0 | 0 | 478 | 0.628121 | 263 | 0.345598 | 493 | 0.647832 |
9335654c2c674182289730fd81c0651f4e53ee52 | 2,794 | py | Python | train.py | olavosamp/kaggle_isic_2020 | d4680251997b65987f25d66a6d1d8aec1e71adaa | [
"MIT"
] | null | null | null | train.py | olavosamp/kaggle_isic_2020 | d4680251997b65987f25d66a6d1d8aec1e71adaa | [
"MIT"
] | null | null | null | train.py | olavosamp/kaggle_isic_2020 | d4680251997b65987f25d66a6d1d8aec1e71adaa | [
"MIT"
] | null | null | null | import torch
import torchvision
import numpy as np
import lib.model
from lib.model import MetadataModel, train_model
import lib.dataset
import lib.dirs as dirs
import lib.utils as utils
import lib.vis_utils as vutils
import lib.defines as defs
if __name__ == "__main__":
    # --- Hyperparameters and run configuration ---
    data_path       = dirs.dataset
    use_metadata    = True
    dataset_balance = True
    loss_balance    = not(dataset_balance)  # balance either dataset or loss, not both
    batch_size      = 64
    learning_rate   = 0.001
    weight_decay    = 0.0001
    momentum        = 0.9
    epoch_number    = 5
    step_size       = 20
    gamma           = 0.1
    data_sample_size= 1. # This should be 1 for training with the entire dataset
    freeze_conv     = False
    # Run identifier encodes the configuration for the results folder name.
    identifier = "sample_{:.0f}%_metadata_{}_loss-balance_{}_dataset-balance_{}_freeze_{}".format(
        data_sample_size*100, use_metadata, loss_balance, dataset_balance, freeze_conv)
    # Define image transformations
    image_transform = utils.resnet_transforms(defs.IMAGENET_MEAN, defs.IMAGENET_STD)
    # Create train and validation datasets
    dataset = {}
    dataset["train"] = lib.dataset.create_dataset(data_path, "csv/{}_set.csv".format("train"),
        transform=image_transform["train"], balance=dataset_balance, sample=data_sample_size)
    dataset["val"] = lib.dataset.create_dataset(data_path, "csv/{}_set.csv".format("val"),
        transform=image_transform["val"], balance=False, sample=data_sample_size)
    print("Train set size: {}.".format(len(dataset["train"])))
    print("Validation set size: {}.".format(len(dataset["val"])))
    # Load model
    # resnet = torchvision.models.resnext50_32x4d(pretrained=True)
    resnet = torchvision.models.resnet18(pretrained=True)
    if use_metadata:
        # Strip the final FC layer; MetadataModel fuses image and metadata features.
        model_base = torch.nn.Sequential(*list(resnet.children())[:-1])
        model = MetadataModel(model_base, base_out_dim=512) # Resnet18
        # model = MetadataModel(model_base, base_out_dim=2048) # Resnext50
    else:
        resnet.fc = torch.nn.Linear(512, 2)
        model = resnet
    model.to(lib.model.device)
    # optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate,
    #     momentum=momentum, weight_decay=weight_decay)
    optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate,
        betas=(0.85, 0.99), weight_decay=weight_decay)
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=step_size,
        gamma=gamma)
    # Train, then plot validation metrics from the saved results.
    results_folder = train_model(model, dataset, batch_size, optimizer, scheduler, epoch_number,
        use_metadata, loss_balance=loss_balance, identifier=identifier,
        freeze_conv=freeze_conv)
    vutils.plot_val_from_results(results_folder, dest_dir=None)
| 39.914286 | 107 | 0.67287 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 609 | 0.217967 |
9335a61ad4c952dbdf3a5969c7c3e1df83b69ab0 | 1,533 | py | Python | src/owncloud_rename.py | pzia/keepmydatas | 3783909119e7ece986a92b9e56ec53cddeb924e3 | [
"MIT"
] | 1 | 2020-07-17T02:14:39.000Z | 2020-07-17T02:14:39.000Z | src/owncloud_rename.py | pzia/keepmydatas | 3783909119e7ece986a92b9e56ec53cddeb924e3 | [
"MIT"
] | null | null | null | src/owncloud_rename.py | pzia/keepmydatas | 3783909119e7ece986a92b9e56ec53cddeb924e3 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Parse tree and find files matching owncloud forbidden characters.
Rename in place or move into a specific folder
"""
import KmdCmd
import KmdFiles
import os, re
import logging
class KmdOwncloudRename(KmdCmd.KmdCommand):
regexp = r'[\*:"?><|]+'
def extendParser(self):
super(KmdOwncloudRename, self).extendParser()
#Extend parser
self.parser.add_argument('folders', metavar='</path/to/tree>', nargs='+', help='The source tree')
self.parser.add_argument('--moveto', metavar='</path/to/folder>', nargs=1, default=None, help='Path to move bad named files')
def run(self):
owncloudre = re.compile(self.regexp)
for folder in self.args.folders :
logging.info("Running in %s", folder)
for root, _, files in os.walk(folder):
for i in files:
m = owncloudre.search("%s" % i)
if m is not None :
newname = owncloudre.sub('_', i)
logging.info("Renaming %s into %s", i, newname)
if self.args.doit :
origpath = os.path.join(root, i)
newpath = os.path.join(root, newname)
KmdFiles.fileMoveRename(origpath, newpath, self.args.doit)
logging.debug("Done : %s -> %s", origpath, newpath)
if __name__ == "__main__":
    # Build the command from the module docstring and execute it.
    KmdOwncloudRename(__doc__).run()
| 37.390244 | 133 | 0.55773 | 1,226 | 0.799739 | 0 | 0 | 0 | 0 | 0 | 0 | 366 | 0.238748 |
93367dc6cd9bf8f8bcaab3536826c212ad41d6ba | 13,975 | py | Python | gym_tetris/board.py | michielcx/Tetris-DQN | 66ee640aaa2068f3db4798ddf51f1518c3779cc6 | [
"MIT"
] | 13 | 2019-10-23T09:32:33.000Z | 2021-08-01T10:39:57.000Z | gym_tetris/board.py | michielcx/Tetris-DQN | 66ee640aaa2068f3db4798ddf51f1518c3779cc6 | [
"MIT"
] | 1 | 2022-02-10T00:29:36.000Z | 2022-02-10T00:29:36.000Z | gym_tetris/board.py | michielcx/Tetris-DQN | 66ee640aaa2068f3db4798ddf51f1518c3779cc6 | [
"MIT"
] | 6 | 2020-10-16T01:43:35.000Z | 2022-01-28T12:05:32.000Z | import random
def get_random_bag():
    """Return a shuffled bag with one Piece per tetromino shape (bag randomizer)."""
    shuffled = list(SHAPES)
    random.shuffle(shuffled)
    return [Piece(0, 0, s) for s in shuffled]
class Shape:
    """Static description of a tetromino and all of its rotations.

    A blueprint is a list of equal-length row strings; any non-space
    character marks an occupied cell. ``blueprints`` holds one blueprint
    per distinct rotation; rotation indices wrap around via modulo.
    """

    def __init__(self, code, blueprints):
        self.code = code  # numeric id stored in the board grid for this shape
        self.rotations = len(blueprints)
        self.blueprints = blueprints
        self.shape_coords = []
        # Bug fix: width/height previously measured the wrong axes
        # (len(blueprints[0]) counts *rows*, len(blueprints) counts
        # *rotations*). The old values only coincided with the real width
        # because every bundled blueprint grid is square, so the fix does
        # not change behavior for the shipped shapes.
        self.width = len(blueprints[0][0])
        self.height = len(blueprints[0])
        for rotation in range(self.rotations):
            self.shape_coords.append(list(self._create_shape_coords(rotation)))

    def _get_blueprint(self, rotation):
        """Return the row strings for *rotation* (index wraps around)."""
        return self.blueprints[rotation % self.rotations]

    def get_shape_coords(self, rotation):
        """Return the precomputed (row, col) offsets occupied at *rotation*."""
        return self.shape_coords[rotation % self.rotations]

    def _create_shape_coords(self, rotation):
        """Yield (offset_y, offset_x) for every occupied blueprint cell.

        Iterates each row at its own length, so ragged blueprints no longer
        risk an IndexError (the old code indexed every row at row 0's width).
        """
        blueprint = self._get_blueprint(rotation)
        for offset_y, row in enumerate(blueprint):
            for offset_x, cell in enumerate(row):
                if cell != ' ':
                    yield offset_y, offset_x
SHAPE_I = Shape(1, [[
' ',
'####',
' ',
' ',
], [
' # ',
' # ',
' # ',
' # ',
]])
SHAPE_O = Shape(2, [[
'##',
'##',
]])
SHAPE_T = Shape(3, [[
' ',
'###',
' # ',
], [
' # ',
'## ',
' # ',
], [
' # ',
'###',
' ',
], [
' # ',
' ##',
' # ',
]])
SHAPE_S = Shape(4, [[
' ',
' ##',
'## ',
], [
' # ',
' ##',
' #',
]])
SHAPE_Z = Shape(5, [[
' ',
'## ',
' ##',
], [
' #',
' ##',
' # ',
]])
SHAPE_J = Shape(6, [[
' ',
'###',
' #',
], [
' # ',
' # ',
'## ',
], [
'# ',
'###',
' ',
], [
' ##',
' # ',
' # ',
]])
SHAPE_L = Shape(7, [[
' ',
'###',
'# ',
], [
'## ',
' # ',
' # ',
], [
' #',
'###',
' ',
], [
' # ',
' # ',
' ##',
]])
SHAPES = [SHAPE_I, SHAPE_O, SHAPE_T, SHAPE_S, SHAPE_Z, SHAPE_J, SHAPE_L]
class Piece:
    """One tetromino instance positioned on the board.

    Absolute cell coordinates are computed lazily and cached; any move
    or rotation invalidates the cache.
    """

    def __init__(self, x, y, shape: Shape, rotation=0):
        self.x = x
        self.y = y
        self.shape = shape
        self.rotation = rotation
        self.shape_coords = None  # cached absolute coordinates, or None

    def rotate(self, dir_rotate):
        """Rotate the piece by *dir_rotate* steps and drop the cache."""
        self.rotation += dir_rotate
        self.shape_coords = None

    def move(self, x, y):
        """Translate the piece by (x, y) and drop the cache."""
        self.x += x
        self.y += y
        self.shape_coords = None

    def get_shape_coords(self):
        """Return the absolute (x, y) board cells occupied by the piece."""
        if self.shape_coords is None:
            # Horizontal origin centres the shape around self.x.
            origin_x = self.x - round(self.shape.width / 2)
            origin_y = self.y
            offsets = self.shape.get_shape_coords(self.rotation)
            self.shape_coords = [
                (origin_x + dx, origin_y + dy) for dy, dx in offsets
            ]
        return self.shape_coords
class Board:
    """Tetris playing field.

    Holds the grid (`pieces_table`, 0 = empty cell, otherwise a shape
    code), the current/next/held pieces, and the board statistics used
    as features by the learning agent.
    """
    def __init__(self, columns, rows):
        self.columns = columns
        self.rows = rows
        # Grid of shape codes; 0 means the cell is empty.
        self.pieces_table = [[0 for i in range(columns)] for j in range(rows)]
        self.piece = None          # piece currently falling
        self.piece_next = None     # preview piece
        self.piece_holding = None  # piece parked in the hold slot
        self.piece_last = None     # most recently locked piece
        self.can_hold = True       # hold is allowed once per spawned piece
        self.bag = get_random_bag()
        self.create_piece()
    def create_piece(self):
        """The next piece becomes the current piece and spawn it on the board."""
        if self.piece_next is not None:
            self.piece = self.piece_next
        else:
            self.piece = self.bag.pop()
        self.piece.move(int(self.columns / 2), 0)
        self.piece_next = self.bag.pop()
        self.can_hold = True
        # Refill the bag as soon as it empties (7-piece bag randomizer).
        if not self.bag:
            self.bag = get_random_bag()
    def _place_piece(self):
        """Solidify the current piece onto the board and returns success."""
        coords = self.piece.get_shape_coords()
        if any(x < 0 or x >= self.columns or y < 0 or y >= self.rows or self.pieces_table[y][x] != 0 for x, y in
               coords):
            return False
        for x, y in coords:
            self.pieces_table[y][x] = self.piece.shape.code
        self.piece_last = self.piece
        self.piece = None
        return True
    def can_move_piece(self, dir_x, dir_y):
        """Returns true if the piece does not intersect with a non-empty cell when moved."""
        for x, y in self.piece.get_shape_coords():
            next_x = x + dir_x
            next_y = y + dir_y
            if next_x < 0 or next_x >= self.columns or next_y < 0 or next_y >= self.rows:
                return False
            if self.pieces_table[next_y][next_x] != 0:
                return False
        return True
    def move_piece(self, dir_x):
        """Move the piece in a direction and returns success."""
        if self.piece is None:
            return False
        if not self.can_move_piece(dir_x, 0):
            return False
        self.piece.move(dir_x, 0)
        return True
    def drop_piece(self):
        """Drop the piece by one cell; returns True once the piece locks in place."""
        if self.piece is None:
            return False
        if not self.can_move_piece(0, 1):
            # Blocked below: solidify instead of moving.
            self._place_piece()
            return True
        self.piece.move(0, 1)
        return False
    def rotate_piece(self, dir_rotation):
        """Rotate the current piece and returns success."""
        if self.piece is None:
            return False
        self.piece.rotate(dir_rotation)
        if not self.can_move_piece(0, 0):
            # Simple wall kick: try shifting one cell either way, else undo.
            if not self.move_piece(-1) and not self.move_piece(1):
                self.piece.rotate(-dir_rotation)
                return False
        return True
    def is_game_over(self):
        """Returns if the current piece is unable to move (spawn is blocked)."""
        return self.piece is not None and not self.can_move_piece(0, 0)
    def is_row(self, y):
        """Returns if the row is a fully filled one."""
        return 0 not in self.pieces_table[y]
    def remove_row(self, y):
        """Removes a row from the board and returns its cells."""
        removed_row = self.pieces_table.pop(y)
        self.pieces_table.insert(0, [0 for i in range(self.columns)])
        return removed_row
    def insert_row(self, y, row):
        """Inserts a row into the board (inverse of `remove_row`)."""
        self.pieces_table.pop(0)
        self.pieces_table.insert(y, row)
    def move_and_drop(self, x, rotation):
        """Move the piece and drop it as far down as possible and returns success."""
        if self.piece is None:
            return False
        self.piece.rotate(rotation)
        return self.can_move_piece(0, 0) and self.move_piece(-self.piece.x + x) and self.drop_piece_fully()
    def drop_piece_fully(self):
        """Drops the current piece as far down as possible and returns success."""
        if self.piece is None:
            return False
        while self.can_move_piece(0, 1):
            self.piece.move(0, 1)
        return self._place_piece()
    def hold_piece(self):
        """Switches the piece held with the current and returns success."""
        if self.piece is None or not self.can_hold:
            return False
        piece_current = self.piece
        self.piece = self.piece_holding
        self.piece_holding = piece_current
        # Park the held piece back at the origin.
        self.piece_holding.move(-self.piece_holding.x, -self.piece_holding.y)
        if self.piece is None:
            self.create_piece()
        else:
            self.piece.move(int(self.columns / 2), 2)
        self.can_hold = False
        return True
    def get_possible_states(self):
        """Returns all possible states of the board with the corresponding action tuple.

        Tries out every possible way to turn and move the current piece.
        The action taken and the state of the board is combined into a tuple
        and added to the returning list. After every try the board is reset
        to its original state (removed rows re-inserted, placed cells
        cleared, current/last piece restored).

        :rtype: A list with a tuple of (action, state).
            action = (column, rotation)
            state = return value of `get_info`
        """
        if self.piece is None:
            return []
        states = []
        last_piece = self.piece_last
        for rotation in range(self.piece.shape.rotations):
            for column in range(self.columns + 1):
                # Snapshot the current piece so it can be restored afterwards.
                piece = Piece(self.piece.x, self.piece.y, self.piece.shape, self.piece.rotation)
                # Execute
                if self.move_and_drop(column, rotation):
                    rows_cleared = self.get_cleared_rows()
                    removed_rows = []
                    for y in rows_cleared:
                        removed_rows.append((y, self.remove_row(y)))
                    # Save
                    states.append(((column, rotation), self.get_info(rows_cleared)))
                    # Reset
                    for y, row in reversed(removed_rows):
                        self.insert_row(y, row)
                    for x, y in self.piece_last.get_shape_coords():
                        self.pieces_table[y][x] = 0
                self.piece = piece
                self.piece_last = last_piece
        return states
    def get_info(self, rows_cleared):
        """Returns the state of the board using statistics.

        0: Rows cleared
        1: Bumpiness
        2: Holes
        3: Landing height
        4: Row transitions
        5: Column transitions
        6: Cumulative wells
        7: Eroded piece cells
        8: Aggregate height

        :rtype: Integer array
        """
        if self.piece_last is not None:
            last_piece_coords = self.piece_last.get_shape_coords()
            eroded_piece_cells = len(rows_cleared) * sum(y in rows_cleared for x, y in last_piece_coords)
            landing_height = 0 if self.piece_last is None else 1 + self.rows - max(y for x, y in last_piece_coords)
        else:
            eroded_piece_cells = 0
            landing_height = 0
        return [
            len(rows_cleared),
            self.get_bumpiness(),
            self.get_hole_count(),
            landing_height,
            self.get_row_transitions(),
            self.get_column_transitions(),
            self.get_cumulative_wells(),
            eroded_piece_cells,
            self.get_aggregate_height(),
        ]
    def get_cleared_rows(self):
        """Returns the indices of fully filled rows."""
        return list(filter(lambda y: self.is_row(y), range(self.rows)))
    def get_row_transitions(self):
        """Returns the number of horizontal cell transitions."""
        total = 0
        for y in range(self.rows):
            row_count = 0
            last_empty = False
            for x in range(self.columns):
                empty = self.pieces_table[y][x] == 0
                if last_empty != empty:
                    row_count += 1
                last_empty = empty
            if last_empty:
                row_count += 1
            # A fully empty row (only the two border transitions) is ignored.
            if last_empty and row_count == 2:
                continue
            total += row_count
        return total
    def get_column_transitions(self):
        """Returns the number of vertical cell transitions."""
        total = 0
        for x in range(self.columns):
            column_count = 0
            last_empty = False
            for y in reversed(range(self.rows)):
                empty = self.pieces_table[y][x] == 0
                if last_empty and not empty:
                    column_count += 2
                last_empty = empty
            # A fully empty column contributes nothing.
            if last_empty and column_count == 1:
                continue
            total += column_count
        return total
    def get_bumpiness(self):
        """Returns the total of the difference between the height of each column."""
        bumpiness = 0
        last_height = -1
        for x in range(self.columns):
            current_height = 0
            for y in range(self.rows):
                if self.pieces_table[y][x] != 0:
                    current_height = self.rows - y
                    break
            if last_height != -1:
                bumpiness += abs(last_height - current_height)
            last_height = current_height
        return bumpiness
    def get_cumulative_wells(self):
        """Returns the sum of all wells."""
        wells = [0 for i in range(self.columns)]
        for y, row in enumerate(self.pieces_table):
            left_empty = True
            for x, code in enumerate(row):
                if code == 0:
                    well = False
                    right_empty = self.columns > x + 1 >= 0 and self.pieces_table[y][x + 1] == 0
                    if left_empty or right_empty:
                        well = True
                    # A neighbouring empty cell resets the well depth.
                    wells[x] = 0 if well else wells[x] + 1
                    left_empty = True
                else:
                    left_empty = False
        return sum(wells)
    def get_aggregate_height(self):
        """Returns the sum of the heights of each column."""
        aggregate_height = 0
        for x in range(self.columns):
            for y in range(self.rows):
                if self.pieces_table[y][x] != 0:
                    aggregate_height += self.rows - y
                    break
        return aggregate_height
    def get_hole_count(self):
        """returns the number of empty cells covered by a full cell."""
        hole_count = 0
        for x in range(self.columns):
            below = False
            for y in range(self.rows):
                empty = self.pieces_table[y][x] == 0
                if not below and not empty:
                    below = True
                elif below and empty:
                    hole_count += 1
        return hole_count
| 28.873967 | 115 | 0.536887 | 12,773 | 0.913989 | 340 | 0.024329 | 0 | 0 | 0 | 0 | 2,579 | 0.184544 |
9336c65fb9f6e0d74ee67f3a8efdd3cd42889844 | 249 | py | Python | 03. DP/2xn tile 2.py | KLumy/Basic-Algorithm | e52e4200c1955a9062569814ff3418dd06666845 | [
"MIT"
] | 1 | 2021-01-22T15:58:32.000Z | 2021-01-22T15:58:32.000Z | 03. DP/2xn tile 2.py | KLumy/Basic-Algorithm | e52e4200c1955a9062569814ff3418dd06666845 | [
"MIT"
] | null | null | null | 03. DP/2xn tile 2.py | KLumy/Basic-Algorithm | e52e4200c1955a9062569814ff3418dd06666845 | [
"MIT"
] | null | null | null | import sys
# Rebind input to a fast stdin reader (competitive-programming idiom).
input = sys.stdin.readline
n = int(input())
# Small boards are answered directly: this prints n for n == 0 or n == 1.
if n < 2:
    print(n)
    exit(0)
# d[i]: number of ways to tile a 2 x i board, taken modulo 10007.
# Base cases d[1] = 1 and d[2] = 3; recurrence d[i] = d[i-1] + 2*d[i-2].
d = [0] * (n+1)
d[1] = 1
d[2] = 3
for i in range(n+1):
    # Skip the already-initialised base cases.
    if i < 3:
        continue
    d[i] = (d[i-1] % 10007 + (d[i-2]*2) % 10007) % 10007
print(d[n])
| 13.105263 | 56 | 0.481928 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
93371a58fa9ad1c5d98cbf050ae2c7e71f6f8acc | 1,074 | py | Python | menu.py | dadiletta/Saber | 515751e7e9f0f12d97fa07606883eac73a352044 | [
"MIT",
"BSD-3-Clause"
] | null | null | null | menu.py | dadiletta/Saber | 515751e7e9f0f12d97fa07606883eac73a352044 | [
"MIT",
"BSD-3-Clause"
] | null | null | null | menu.py | dadiletta/Saber | 515751e7e9f0f12d97fa07606883eac73a352044 | [
"MIT",
"BSD-3-Clause"
] | null | null | null | import Light
__author__ = 'adilettad'
# ASCII banner shown on startup.
print("---------------")
print("----Welcome----")
print("------to-------")
print("-----Saber-----")
print("---------------")
# Saber (from the Light module) drives the demo hardware routines below.
sab = Light.Saber()
# Interactive command loop mapping typed commands to Saber demo methods.
# NOTE: uses raw_input, so this script targets Python 2.
while True:
    command = raw_input('Your command:')
    if command == "blink":
        sab.demoLED()
    elif command == "dist" or command == "range":
        sab.demoRange()
    elif command == "watch" or command == "cover":
        sab.coverCheck()
    elif command =="knob":
        sab.demoKnob()
    elif command =="lcd":
        sab.demoLCD()
    elif command =="temp":
        sab.demoTemp()
    elif command =="buzzer":
        sab.demoBuzzer()
    elif command =="button":
        sab.demoButton()
    elif command =="clear":
        sab.clear()
    elif command =="maker":
        sab.demoMaker()
    elif command =="menu":
        exampleMenu = ["One", "Two", "Three", "Four", "Five"]
        # NOTE(review): formMenu(...) + 1 assumes formMenu returns a number;
        # if it returns a string this raises TypeError -- confirm in Light.
        print(sab.formMenu(exampleMenu)+1)
    elif command =="nav":
        sab.rootMenu()
    elif command =="gui":
        sab.launchGUI()
##Need Ctrl+C protection on all methods
93388ca96a9f3273e22a1cd4ed97d8092f0d63f7 | 739 | py | Python | a10/build/lib/a10/asvr/types.py | THS-on/AttestationEngine | 6746d602215df43056b04515bd9b831f0bb0ddbb | [
"BSD-3-Clause-Clear"
] | null | null | null | a10/build/lib/a10/asvr/types.py | THS-on/AttestationEngine | 6746d602215df43056b04515bd9b831f0bb0ddbb | [
"BSD-3-Clause-Clear"
] | 4 | 2021-11-08T19:47:56.000Z | 2021-11-25T10:30:50.000Z | a10/build/lib/a10/asvr/types.py | THS-on/AttestationEngine | 6746d602215df43056b04515bd9b831f0bb0ddbb | [
"BSD-3-Clause-Clear"
] | null | null | null | # Copyright 2021 Nokia
# Licensed under the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
import a10.structures.constants
import a10.structures.identity
import a10.structures.returncode
import a10.asvr.db.core
import a10.asvr.db.announce
import a10.asvr.elements
def getTypes():
    """Gets the set of all currently used types.

    Each element returned by :func:`a10.asvr.elements.getElementsFull`
    carries a list under ``e["type"]``; those lists are flattened into a
    single flat set (``set()`` over a list of lists would fail because
    lists are unhashable).

    :return: set of types
    :rtype: Set
    """
    elements = a10.asvr.elements.getElementsFull()
    # Debug print statements from the original have been removed; flatten
    # every per-element type list into one set.
    return {t for e in elements for t in e["type"]}
| 21.114286 | 50 | 0.630582 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 317 | 0.428958 |
9338957c2d0434f6f713ebe7005892553e33c34d | 456 | py | Python | packages/migrations/0004_auto_20210416_1013.py | dandeduck/package-tracking-web | f7cb3dffd6f7f6b7ced5b1106a049c79c192dfa5 | [
"MIT"
] | 1 | 2021-02-11T22:16:51.000Z | 2021-02-11T22:16:51.000Z | packages/migrations/0004_auto_20210416_1013.py | dandeduck/package-tracking-web | f7cb3dffd6f7f6b7ced5b1106a049c79c192dfa5 | [
"MIT"
] | 54 | 2021-02-11T18:52:11.000Z | 2021-06-13T13:45:01.000Z | packages/migrations/0004_auto_20210416_1013.py | dandeduck/package-tracking-web | f7cb3dffd6f7f6b7ced5b1106a049c79c192dfa5 | [
"MIT"
] | null | null | null | # Generated by Django 2.2.12 on 2021-04-16 10:13
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 2.2.12 (see header): alters
    # Address.street_number to a PositiveSmallIntegerField.
    dependencies = [
        ('packages', '0003_auto_20210416_1007'),
    ]
    operations = [
        migrations.AlterField(
            model_name='address',
            name='street_number',
            field=models.PositiveSmallIntegerField(blank=True, default=0),
            # default=0 is only used to backfill existing rows during the
            # migration; it is not kept on the field afterwards.
            preserve_default=False,
        ),
    ]
| 22.8 | 74 | 0.620614 | 362 | 0.79386 | 0 | 0 | 0 | 0 | 0 | 0 | 107 | 0.234649 |
933b4da8586a2940f3a4d00ebc21c1950b2eb3db | 241 | py | Python | website/music/views.py | mrsmartpants/DjangoTutorial-Beginners | 11e64f4f982a576f814db5ed466f8b31bba5ae5c | [
"MIT"
] | null | null | null | website/music/views.py | mrsmartpants/DjangoTutorial-Beginners | 11e64f4f982a576f814db5ed466f8b31bba5ae5c | [
"MIT"
] | null | null | null | website/music/views.py | mrsmartpants/DjangoTutorial-Beginners | 11e64f4f982a576f814db5ed466f8b31bba5ae5c | [
"MIT"
] | null | null | null | from django.shortcuts import render
# Python functions - user is going to request an url
# Create your views here.
from django.http import HttpResponse
def index(request):
    """Django view: respond with the static music-app homepage markup."""
    homepage_markup = "<h1> This is the music app homepage</h1>"
    return HttpResponse(homepage_markup)
933cd25c41f2ec4430b1ae8b3e9ce531b2bfd7b9 | 1,073 | py | Python | LeafNATS/modules/embedding/position_embedding.py | dumpmemory/AspDecSSCL | 004d73d3248e8fdee1336cfc6490ef4872583665 | [
"MIT"
] | 23 | 2020-12-16T15:50:07.000Z | 2022-03-17T05:51:20.000Z | LeafNATS/modules/embedding/position_embedding.py | dumpmemory/AspDecSSCL | 004d73d3248e8fdee1336cfc6490ef4872583665 | [
"MIT"
] | 3 | 2021-02-01T07:24:35.000Z | 2021-11-24T09:28:35.000Z | LeafNATS/modules/embedding/position_embedding.py | dumpmemory/AspDecSSCL | 004d73d3248e8fdee1336cfc6490ef4872583665 | [
"MIT"
] | 4 | 2021-05-12T06:49:43.000Z | 2021-10-04T04:41:08.000Z | '''
@author Tian Shi
Please contact tshi@vt.edu
'''
import math
import torch
class PositionalEmbedding(torch.nn.Module):
    '''
    Implementation of sinusoidal Positional Embedding.

    Precomputes a (1, 10000, hidden_size) table of sine/cosine values once
    in __init__ so the forward pass is a cheap slice.
    '''

    def __init__(self, hidden_size, device=torch.device("cpu")):
        '''
        hidden_size: embedding dimension; even indices get sin, odd get cos
                     (assumes hidden_size is even -- odd sizes would make the
                     cos assignment shape-mismatch, as in the original).
        device: device the precomputed table is moved to.
        '''
        super().__init__()
        self.hidden_size = hidden_size

        self.posEmb = torch.zeros(10000, hidden_size, dtype=torch.float)
        # Fix: the original set ``require_grad`` (typo), which merely
        # attached an unused attribute; the real autograd flag is
        # ``requires_grad``.
        self.posEmb.requires_grad = False

        # inv_term[i] = 10000^(-2i / hidden_size), the standard frequency
        # schedule from "Attention Is All You Need".
        position = torch.arange(10000, dtype=torch.float).unsqueeze(1)
        p_term1 = torch.arange(0, hidden_size, 2, dtype=torch.float)
        p_term2 = - math.log(10000.0) / hidden_size
        inv_term = torch.exp(p_term1 * p_term2)

        posEmb_input = position * inv_term
        self.posEmb[:, 0::2] = torch.sin(posEmb_input)
        self.posEmb[:, 1::2] = torch.cos(posEmb_input)
        self.posEmb = self.posEmb.unsqueeze(0).to(device)

    def forward(self, input_):
        '''
        input_: input sequence batch; only its second dimension (sequence
                length) is read.
        Returns the positional table sliced to (1, seq_len, hidden_size).
        '''
        seq_len = input_.size(1)
        pos_emb = self.posEmb[:, :seq_len]
        return pos_emb
| 26.170732 | 72 | 0.625349 | 992 | 0.924511 | 0 | 0 | 0 | 0 | 0 | 0 | 158 | 0.147251 |
933d30b34a1b5fd85d26dcd036d20209cd0759ee | 3,108 | py | Python | tests/tensor/sum_test.py | kbrodt/tor4 | d09740b746c534e67a72f492c7c03654f5888a46 | [
"MIT"
] | null | null | null | tests/tensor/sum_test.py | kbrodt/tor4 | d09740b746c534e67a72f492c7c03654f5888a46 | [
"MIT"
] | null | null | null | tests/tensor/sum_test.py | kbrodt/tor4 | d09740b746c534e67a72f492c7c03654f5888a46 | [
"MIT"
] | null | null | null | from tor4 import tensor
def test_tensor_sum():
    """Summing a non-grad tensor yields value 2 with no grad tracking."""
    source = tensor(data=[-1, 1, 2])
    result = source.sum()
    assert result.tolist() == 2
    assert not result.requires_grad
def test_tensor_sum_backward():
    """backward() on a scalar sum seeds a gradient of ones into the input."""
    source = tensor(data=[-1, 1, 2.0], requires_grad=True)
    result = source.sum()
    result.backward()
    assert result.tolist() == 2
    assert result.requires_grad
    assert source.grad.tolist() == [1, 1, 1]
def test_tensor_sum_backward2():
    """An explicit upstream gradient of 3 is broadcast to every input cell."""
    source = tensor(data=[-1, 1, 2.0], requires_grad=True)
    result = source.sum()
    result.backward(tensor(3))
    assert result.tolist() == 2
    assert result.requires_grad
    assert source.grad.tolist() == [3, 3, 3]
def test_tensor_sum1_backward():
    """dim=1 sum: each row's upstream gradient spreads across that row."""
    source = tensor(data=[[-1, 1, 2], [1, 2, 3.0]], requires_grad=True)
    result = source.sum(dim=1)
    result.backward(tensor(data=[2, 3]))
    assert result.tolist() == [2, 6]
    assert result.requires_grad
    assert source.grad.tolist() == [[2, 2, 2], [3, 3, 3]]
def test_tensor_sum2_backward():
    """dim=1 sum of a 3-D tensor backpropagates along the reduced axis."""
    source = tensor(data=[[[-1], [1], [2]], [[1], [2], [3.0]]], requires_grad=True)
    result = source.sum(dim=1)
    result.backward(tensor(data=[[2], [3]]))
    assert result.tolist() == [[2], [6]]
    assert result.requires_grad
    assert source.grad.tolist() == [[[2], [2], [2]], [[3], [3], [3]]]
def test_tensor_sum3_backward():
    """Full scalar sum of a 3-D tensor backpropagates ones everywhere."""
    source = tensor(data=[[[-1], [1], [2]], [[1], [2], [3.0]]], requires_grad=True)
    result = source.sum()
    result.backward()
    assert result.tolist() == 8
    assert result.requires_grad
    assert source.grad.tolist() == [[[1], [1], [1]], [[1], [1], [1]]]
def test_tensor_sum4_backward():
    """Summing over a tuple of dims (1, 0) keeps the trailing axis."""
    source = tensor(data=[[[-1], [1], [2]], [[1], [2], [3.0]]], requires_grad=True)
    result = source.sum(dim=(1, 0))
    result.backward()
    assert result.tolist() == [8]
    assert result.requires_grad
    assert source.grad.tolist() == [[[1], [1], [1]], [[1], [1], [1]]]
def test_tensor_sum_keepdim1_backward():
    """keepdim=True keeps the reduced axis as size 1; grads still broadcast."""
    source = tensor(data=[[-1, 1, 2], [1, 2, 3.0]], requires_grad=True)
    result = source.sum(dim=1, keepdim=True)
    result.backward(tensor(data=[[2], [3]]))
    assert result.tolist() == [[2], [6]]
    assert result.requires_grad
    assert source.grad.tolist() == [[2, 2, 2], [3, 3, 3]]
def test_tensor_sum_keepdim2_backward():
    """keepdim=True on a 3-D dim=1 sum preserves the rank of the output."""
    source = tensor(data=[[[-1], [1], [2]], [[1], [2], [3.0]]], requires_grad=True)
    result = source.sum(dim=1, keepdim=True)
    result.backward(tensor(data=[[[2]], [[3]]]))
    assert result.tolist() == [[[2]], [[6]]]
    assert result.requires_grad
    assert source.grad.tolist() == [[[2], [2], [2]], [[3], [3], [3]]]
def test_tensor_sum_keepdim3_backward():
    """Scalar sum sanity check (NOTE: keepdim is not actually passed here)."""
    source = tensor(data=[[[-1], [1], [2]], [[1], [2], [3.0]]], requires_grad=True)
    result = source.sum()
    result.backward()
    assert result.tolist() == 8
    assert result.requires_grad
    assert source.grad.tolist() == [[[1], [1], [1]], [[1], [1], [1]]]
def test_tensor_sum_keepdim4_backward():
    """keepdim=True with dims (1, 0) leaves two singleton axes in front."""
    source = tensor(data=[[[-1], [1], [2]], [[1], [2], [3.0]]], requires_grad=True)
    result = source.sum(dim=(1, 0), keepdim=True)
    result.backward()
    assert result.tolist() == [[[8]]]
    assert result.requires_grad
    assert source.grad.tolist() == [[[1], [1], [1]], [[1], [1], [1]]]
| 28.254545 | 78 | 0.560811 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
933e03200f3271fcfb99f615ab7c6f6d828aa8d2 | 3,150 | py | Python | Excite.py | JohnDoe2576/MyPythonCodes | 9d714bc9f9909af0ade4142439da518b30b7d51d | [
"MIT"
] | null | null | null | Excite.py | JohnDoe2576/MyPythonCodes | 9d714bc9f9909af0ade4142439da518b30b7d51d | [
"MIT"
] | null | null | null | Excite.py | JohnDoe2576/MyPythonCodes | 9d714bc9f9909af0ade4142439da518b30b7d51d | [
"MIT"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
def aprbs(**parms):
    # Generate an Amplitude modulated Pseudo-Random Binary Sequence (APRBS).
    #
    # A classic PRBS excitation holds the signal at one of two levels for a
    # random, range-bound duration; it is the standard excitation for
    # identifying linear systems near their operating frequency.
    #
    # Nonlinear systems respond differently at different amplitudes (no
    # superposition), so APRBS generalises PRBS: every hold period also
    # receives a random amplitude between alpha_min and alpha_max, probing
    # the system over many operating points instead of just two.
    #
    # Keyword parameters:
    #   n_samples: number of required samples
    #   alpha: tuple (min_amplitude, max_amplitude)
    #   tau: tuple (min_delay, max_delay), in samples

    # Unpack parameters into working ranges.
    total_samples = parms['n_samples']
    tau_lo, tau_hi = parms['tau']
    alpha_lo, alpha_hi = parms['alpha']
    tau_span = tau_hi - tau_lo
    alpha_span = alpha_hi - alpha_lo

    # Per-hold durations (in samples) and amplitude levels; at most one
    # hold per output sample, so total_samples entries always suffice.
    hold_lengths = np.zeros((total_samples), dtype=int)
    levels = np.zeros((total_samples))

    generated = 0
    n_holds = 0
    while generated < total_samples:
        # One random delay and one random amplitude per hold period
        # (drawn in this exact order to keep the RNG stream unchanged).
        tau_shift = np.random.uniform(0.0, 1.0, 1)
        alpha_shift = np.random.uniform(0.0, 1.0, 1)
        hold_lengths[n_holds] = np.fix(tau_lo + (tau_shift * tau_span)).astype(int)
        levels[n_holds] = alpha_lo + (alpha_shift * alpha_span)
        generated += hold_lengths[n_holds]
        n_holds += 1

    # Trim the final hold so the signal is exactly total_samples long.
    hold_lengths[n_holds - 1] -= (generated - total_samples)

    # Expand each (level, duration) pair into the output signal.
    return np.repeat(levels[:n_holds], hold_lengths[:n_holds])
# Time parameters
t0 = 0. # Start time
dt = 0.01 # Time step
t1 = 100. # End time
# Time vector
t = np.arange(t0, t1, dt)
# Signal parameters
n_samples = len(t)
alpha = (-2.5, 2.5)
# Hold durations between dt and 1 second, converted to sample counts.
tau = tuple(np.array([dt, 1.])/dt)
# Generate the APRBS excitation and plot it against time.
u = aprbs(n_samples=n_samples, alpha=alpha, tau=tau)
plt.plot(t,u)
plt.show()
| 35 | 94 | 0.634286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,505 | 0.477778 |