Dataset schema (each record below is one row of the table):

| Column | Type / range |
|---|---|
| hexsha | string, length 40 |
| size | int64, 4 to 1.02M |
| ext | string, 8 classes |
| lang | string, 1 class |
| max_stars_repo_path | string, length 4 to 209 |
| max_stars_repo_name | string, length 5 to 121 |
| max_stars_repo_head_hexsha | string, length 40 |
| max_stars_repo_licenses | list, length 1 to 10 |
| max_stars_count | int64, 1 to 191k, nullable |
| max_stars_repo_stars_event_min_datetime | string, length 24, nullable |
| max_stars_repo_stars_event_max_datetime | string, length 24, nullable |
| max_issues_repo_path | string, length 4 to 209 |
| max_issues_repo_name | string, length 5 to 121 |
| max_issues_repo_head_hexsha | string, length 40 |
| max_issues_repo_licenses | list, length 1 to 10 |
| max_issues_count | int64, 1 to 67k, nullable |
| max_issues_repo_issues_event_min_datetime | string, length 24, nullable |
| max_issues_repo_issues_event_max_datetime | string, length 24, nullable |
| max_forks_repo_path | string, length 4 to 209 |
| max_forks_repo_name | string, length 5 to 121 |
| max_forks_repo_head_hexsha | string, length 40 |
| max_forks_repo_licenses | list, length 1 to 10 |
| max_forks_count | int64, 1 to 105k, nullable |
| max_forks_repo_forks_event_min_datetime | string, length 24, nullable |
| max_forks_repo_forks_event_max_datetime | string, length 24, nullable |
| content | string, length 4 to 1.02M |
| avg_line_length | float64, 1.07 to 66.1k |
| max_line_length | int64, 4 to 266k |
| alphanum_fraction | float64, 0.01 to 1 |

In the records below, the path, repo name, head hexsha, and licenses are listed once when they are identical across the stars, issues, and forks sub-records; where they differ, each sub-record is shown separately.
Record 1
hexsha: f88beebda3337359f9d1a8f379e8bd41f8a24a00 | size: 4,349 | ext: py | lang: Python
path: lib/modules/python/situational_awareness/network/active_directory/get_computers.py | repo: vinnybod/Empire | head: 6ad0bcd171952da93f059348e4ae00e20154dce7 | licenses: ["BSD-3-Clause"]
stars: 3 (2020-03-24T04:37:00.000Z to 2021-04-07T06:05:16.000Z) | issues: null | forks: null
content:
from builtins import object
class Module(object):
def __init__(self, mainMenu, params=[]):
# metadata info about the module, not modified during runtime
self.info = {
# name for the module that will appear in module menus
'Name': 'Get Computers',
# list of one or more authors for the module
'Author': ['@424f424f'],
# more verbose multi-line description of the module
'Description': 'This module will list all computer objects from active directory',
# True if the module needs to run in the background
'Background' : False,
# File extension to save the file as
'OutputExtension' : "",
# if the module needs administrative privileges
'NeedsAdmin' : False,
# True if the method doesn't touch disk/is reasonably opsec safe
'OpsecSafe' : True,
# the module language
'Language' : 'python',
# the minimum language version needed
'MinLanguageVersion' : '2.6',
# list of any references/other comments
'Comments': ['']
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
# The 'Agent' option is the only one that MUST be in a module
'Description' : 'Agent to run on.',
'Required' : True,
'Value' : ''
},
'LDAPAddress' : {
'Description' : 'LDAP IP/Hostname',
'Required' : True,
'Value' : ''
},
'BindDN' : {
'Description' : 'user@penlab.local',
'Required' : True,
'Value' : ''
},
'Password' : {
'Description' : 'Password to connect to LDAP',
'Required' : False,
'Value' : ''
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
# During instantiation, any settable option parameters
# are passed as an object set to the module and the
# options dictionary is automatically set. This is mostly
# in case options are passed on the command line
if params:
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self, obfuscate=False, obfuscationCommand=""):
LDAPAddress = self.options['LDAPAddress']['Value']
BindDN = self.options['BindDN']['Value']
password = self.options['Password']['Value']
# the Python script itself, with the command to invoke
# for execution appended to the end. Scripts should output
# everything to the pipeline for proper parsing.
#
# the script should be stripped of comments, with a link to any
# original reference script included in the comments.
script = """
import sys, os, subprocess, re
BindDN = "%s"
LDAPAddress = "%s"
password = "%s"
regex = re.compile('.+@([^.]+)\..+')
global tld
match = re.match(regex, BindDN)
tld = match.group(1)
global ext
ext = BindDN.split('.')[1]
cmd = \"""ldapsearch -x -h {} -b "dc={},dc={}" -D {} -w {} "(objectcategory=Computer)" ""\".format(LDAPAddress, tld, ext, BindDN, password)
output = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
output2 = subprocess.Popen(["grep", "name:"],stdin=output.stdout, stdout=subprocess.PIPE,universal_newlines=True)
output.stdout.close()
out,err = output2.communicate()
print("")
print(out)
""" % (BindDN, LDAPAddress, password)
return script
avg_line_length: 36.241667 | max_line_length: 139 | alphanum_fraction: 0.549782

Record 2
hexsha: e627d236ac93b1838d483c846399af4ef48487bb | size: 8,586 | ext: py | lang: Python
path: universality/tov/ode/logenthalpy.py | repo: isaaclegred/universality | head: 9eac607a78b7cb67c1509ea68f4de631437f393a | licenses: ["MIT"]
stars: 1 (2021-06-02T13:41:46.000Z to 2021-06-02T13:41:46.000Z) | issues: 25 (2018-01-31T15:14:27.000Z to 2021-01-10T21:32:05.000Z) | forks: 2 (2018-12-07T04:04:33.000Z to 2021-09-10T19:00:17.000Z)
content:
"""a module that houses TOV solvers based on the log(enthalpy per unit rest mass)
"""
__author__ = "Reed Essick (reed.essick@gmail.com)"
import numpy as np
from scipy.integrate import odeint
from universality.utils import utils
from universality.utils.units import (G, c2, Msun)
from .standard import (eta2lambda, omega2i)
from .standard import (initial_m, initial_mb, initial_eta, initial_omega)
from .standard import (dmdr, dmbdr, detadr, domegadr)
#-------------------------------------------------
DEFAULT_INITIAL_FRAC = 1e-8 ### the initial change in pressure we allow when setting the initial conditions
DEFAULT_RTOL = 1e-6
DEFAULT_MXSTEP = 10000
#------------------------
TWOPI = 2*np.pi
FOURPI = 2*TWOPI
Gc2 = G/c2
c2G = 1./Gc2
#-------------------------------------------------
### Formulation of the TOV equations in terms of the log(enthalpy per unit rest mass) = log( (eps+p)/rho )
#-------------------------------------------------
def eos2logh(pc2, ec2):
return utils.num_intfdx(np.log(pc2), pc2/(ec2+pc2)) ### thought to be more numerically stable given sparse samples of pc2 in the crust
#------------------------
def drdlogh(r, m, pc2):
return - r * (r*c2G - 2*m) / (m + FOURPI * r**3 * pc2)
def dmdlogh(r, epsc2, dr_dlogh):
return dmdr(r, epsc2) * dr_dlogh
def dmbdlogh(r, m, rho, dr_dlogh):
return dmbdr(r, rho, m) * dr_dlogh
def detadlogh(r, pc2, m, eta, epsc2, cs2c2, dr_dlogh):
return detadr(r, pc2, m, eta, epsc2, cs2c2) * dr_dlogh
def domegadlogh(r, pc2, m, omega, epsc2, dr_dlogh):
return domegadr(r, pc2, m, omega, epsc2) * dr_dlogh
#-------------------------------------------------
# initial conditions
#-------------------------------------------------
def initial_logh(loghi, frac):
return (1. - frac)*loghi ### assume a constant slope over a small change in the pressure
def initial_r(loghi, pc2i, ec2i, frac):
return ( 3.*frac*loghi*c2G / (TWOPI*(ec2i + 3.*pc2i)) )**0.5
#-------------------------------------------------
# central loop that solves the TOV equations given a set of coupled ODEs
#-------------------------------------------------
def engine(
logh,
vec,
eos,
dvecdlogh_func,
rtol=DEFAULT_RTOL,
mxstep=DEFAULT_MXSTEP,
):
    """integrate the TOV equations from the initial condition (logh, vec) down to the stellar surface at logh=0
expects eos = (logenthalpy, pressurec2, energy_densityc2, baryon_density, cs2c2)
"""
### integrate out until we hit termination condition
return odeint(
dvecdlogh_func,
vec,
(logh, 0.),
args=(eos,),
rtol=rtol,
mxstep=mxstep,
)[-1,:]
#-------------------------------------------------
### solver that yields all known macroscopic quantities
MACRO_COLS = ['M', 'R', 'Lambda', 'I', 'Mb'] ### the column names for what we compute
def dvecdlogh(vec, logh, eos):
eos0 = eos[0]
pc2 = np.interp(logh, eos0, eos[1])
ec2 = np.interp(logh, eos0, eos[2])
rho = np.interp(logh, eos0, eos[3])
cs2c2 = np.interp(logh, eos0, eos[4])
m, r, eta, omega, mb = vec
dr_dlogh = drdlogh(r, m, pc2)
return \
dmdlogh(r, ec2, dr_dlogh), \
dr_dlogh, \
detadlogh(r, pc2, m, eta, ec2, cs2c2, dr_dlogh), \
domegadlogh(r, pc2, m, omega, ec2, dr_dlogh), \
dmbdlogh(r, m, rho, dr_dlogh)
def initial_condition(pc2i, eos, frac=DEFAULT_INITIAL_FRAC):
'''analytically solve for the initial condition around the divergence at r=0
'''
eos1 = eos[1]
loghi = np.interp(pc2i, eos1, eos[0])
ec2i = np.interp(pc2i, eos1, eos[2])
rhoi = np.interp(pc2i, eos1, eos[3])
cs2c2i = np.interp(pc2i, eos1, eos[4])
logh = initial_logh(loghi, frac)
r = initial_r(loghi, ec2i, pc2i, frac)
m = initial_m(r, ec2i)
mb = initial_mb(r, ec2i)
eta = initial_eta(r, pc2i, ec2i, cs2c2i)
omega = initial_omega(r, pc2i, ec2i)
return logh, (m, r, eta, omega, mb)
def integrate(
pc2i,
eos,
initial_frac=DEFAULT_INITIAL_FRAC,
rtol=DEFAULT_RTOL,
):
"""integrate the TOV equations with central pressure "pc2i" and equation of state described by energy density "eps/c2" and pressure "p/c2"
expects eos = (logenthalpy, pressurec2, energy_densityc2, baryon_density, cs2c2)
"""
### define initial condition
logh, vec = initial_condition(pc2i, eos, frac=initial_frac)
m, r, eta, omega, mb = engine(
logh,
vec,
eos,
dvecdlogh,
rtol=rtol,
)
# compute tidal deformability
l = eta2lambda(r, m, eta)
# compute moment of inertia
i = omega2i(r, omega)
# convert to "standard" units
m /= Msun ### reported in units of solar masses, not grams
mb /= Msun
r *= 1e-5 ### convert from cm to km
i /= 1e45 ### normalize this to a common value but still in CGS
return m, r, l, i, mb
#-------------------------------------------------
## lightweight solver that only computes M, R
MACRO_COLS_MR = ['M', 'R']
def dvecdlogh_MR(vec, logh, eos):
eos0 = eos[0]
pc2 = np.interp(logh, eos0, eos[1])
ec2 = np.interp(logh, eos0, eos[2])
m, r = vec
dr_dlogh = drdlogh(r, m, pc2)
return \
dmdlogh(r, ec2, dr_dlogh), \
dr_dlogh
def initial_condition_MR(pc2i, eos, frac=DEFAULT_INITIAL_FRAC):
'''analytically solve for the initial condition around the divergence at r=0
'''
eos1 = eos[1]
loghi = np.interp(pc2i, eos1, eos[0])
ec2i = np.interp(pc2i, eos1, eos[2])
logh = initial_logh(loghi, frac)
r = initial_r(loghi, ec2i, pc2i, frac) ### NOTE: this is good enough for the M-R integrals
m = initial_m(r, ec2i) ### but we have to do something more complicated for the other perturbation equations
return logh, (m, r)
def integrate_MR(
pc2i,
eos,
initial_frac=DEFAULT_INITIAL_FRAC,
rtol=DEFAULT_RTOL,
):
"""integrate the TOV equations with central pressure "pc2i" and equation of state described by energy density "eps/c2" and pressure "p/c2"
expects eos = (logenthalpy, pressurec2, energy_densityc2, baryon_density, cs2c2)
"""
### define initial condition
logh, vec = initial_condition_MR(pc2i, eos, frac=initial_frac)
m, r = engine(
logh,
vec,
eos,
dvecdlogh_MR,
rtol=rtol,
)
# convert to "standard" units
m /= Msun ### reported in units of solar masses, not grams
r *= 1e-5 ### convert from cm to km
return m, r
#-------------------------------------------------
### lightweight solver that yields M, R, Lambda
### solver that yields all known macroscopic quantities
MACRO_COLS_MRLambda = ['M', 'R', 'Lambda'] ### the column names for what we compute
def dvecdlogh_MRLambda(vec, logh, eos):
eos0 = eos[0]
pc2 = np.interp(logh, eos0, eos[1])
ec2 = np.interp(logh, eos0, eos[2])
rho = np.interp(logh, eos0, eos[3])
cs2c2 = np.interp(logh, eos0, eos[4])
m, r, eta = vec
dr_dlogh = drdlogh(r, m, pc2)
return \
dmdlogh(r, ec2, dr_dlogh), \
dr_dlogh, \
detadlogh(r, pc2, m, eta, ec2, cs2c2, dr_dlogh)
def initial_condition_MRLambda(pc2i, eos, frac=DEFAULT_INITIAL_FRAC):
'''analytically solve for the initial condition around the divergence at r=0
'''
eos1 = eos[1]
loghi = np.interp(pc2i, eos1, eos[0])
ec2i = np.interp(pc2i, eos1, eos[2])
rhoi = np.interp(pc2i, eos1, eos[3])
cs2c2i = np.interp(pc2i, eos1, eos[4])
logh = initial_logh(loghi, frac)
r = initial_r(loghi, ec2i, pc2i, frac)
m = initial_m(r, ec2i)
eta = initial_eta(r, pc2i, ec2i, cs2c2i)
return logh, (m, r, eta)
def integrate_MRLambda(
pc2i,
eos,
initial_frac=DEFAULT_INITIAL_FRAC,
rtol=DEFAULT_RTOL,
):
"""integrate the TOV equations with central pressure "pc2i" and equation of state described by energy density "eps/c2" and pressure "p/c2"
expects eos = (logenthalpy, pressurec2, energy_densityc2, baryon_density, cs2c2)
"""
### define initial condition
logh, vec = initial_condition_MRLambda(pc2i, eos, frac=initial_frac)
m, r, eta = engine(
logh,
vec,
eos,
dvecdlogh_MRLambda,
rtol=rtol,
)
# compute tidal deformability
l = eta2lambda(r, m, eta)
# convert to "standard" units
m /= Msun ### reported in units of solar masses, not grams
r *= 1e-5 ### convert from cm to km
return m, r, l
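A minimal usage sketch of the solver above, assuming the module's functions are in scope. The tabulated arrays below are schematic stand-ins, not a physical equation of state; only the documented column order `(logenthalpy, pressurec2, energy_densityc2, baryon_density, cs2c2)` and the call shape are taken from the code above.

```python
# Schematic usage of the log(enthalpy) TOV solver above.
# The toy EOS table below is an illustrative assumption only.
import numpy as np

pressurec2 = np.logspace(10.0, 16.0, 200)             # p / c^2 on a monotonic grid
energy_densityc2 = 10.0 * pressurec2**0.75 * 1.0e3    # eps / c^2 (toy relation)
baryon_density = 0.9 * energy_densityc2               # rho (toy relation)
cs2c2 = np.gradient(pressurec2, energy_densityc2)     # (dp/deps) / c^2
logenthalpy = eos2logh(pressurec2, energy_densityc2)  # built with the helper above

eos = (logenthalpy, pressurec2, energy_densityc2, baryon_density, cs2c2)

pc2i = pressurec2[150]                                # a central pressure inside the table
M, R, Lam, I, Mb = integrate(pc2i, eos)               # columns follow MACRO_COLS
print(M, R, Lam, I, Mb)
```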
avg_line_length: 29.916376 | max_line_length: 142 | alphanum_fraction: 0.594573

Record 3
hexsha: e7497bb027f72f69bfbfe34c3b05acf8e4c6dad1 | size: 4,423 | ext: py | lang: Python
path: src/encoded/dev_servers.py | repo: 4dn-dcic/fourfron | head: 29601961706d2371b982e57ae085e8ebec3b2714 | licenses: ["MIT"]
stars: 11 (2016-11-23T02:33:13.000Z to 2021-06-18T14:21:20.000Z) | issues: 1,159 (2016-11-21T15:40:24.000Z to 2022-03-29T03:18:38.000Z) | forks: 5 (2017-01-27T16:36:15.000Z to 2019-06-14T14:39:54.000Z)
content:
"""\
Examples
For the development.ini you must supply the paster app name:
%(prog)s development.ini --app-name app --init --clear
"""
import argparse
import atexit
import logging
import os.path
import select
import shutil
import subprocess
import sys
from pkg_resources import resource_filename
from pyramid.paster import get_app, get_appsettings
from pyramid.path import DottedNameResolver
from snovault.elasticsearch import create_mapping
from snovault.tests import elasticsearch_fixture, postgresql_fixture
EPILOG = __doc__
logger = logging.getLogger(__name__)
def nginx_server_process(prefix='', echo=False):
args = [
os.path.join(prefix, 'nginx'),
'-c', resource_filename('encoded', 'nginx-dev.conf'),
'-g', 'daemon off;'
]
process = subprocess.Popen(
args,
close_fds=True,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
if not echo:
process.stdout.close()
if echo:
print('Started: http://localhost:8000')
return process
def main():
parser = argparse.ArgumentParser(
description="Run development servers", epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('--app-name', help="Pyramid app name in configfile")
parser.add_argument('config_uri', help="path to configfile")
parser.add_argument('--clear', action="store_true", help="Clear existing data")
parser.add_argument('--init', action="store_true", help="Init database")
parser.add_argument('--load', action="store_true", help="Load test set")
parser.add_argument('--datadir', default='/tmp/snovault', help="path to datadir")
args = parser.parse_args()
logging.basicConfig(format='')
# Loading app will have configured from config file. Reconfigure here:
logging.getLogger('encoded').setLevel(logging.INFO)
# get the config and see if we want to connect to non-local servers
config = get_appsettings(args.config_uri, args.app_name)
datadir = os.path.abspath(args.datadir)
pgdata = os.path.join(datadir, 'pgdata')
esdata = os.path.join(datadir, 'esdata')
### comment out from HERE...
if args.clear:
for dirname in [pgdata, esdata]:
if os.path.exists(dirname):
shutil.rmtree(dirname)
if args.init:
postgresql_fixture.initdb(pgdata, echo=True)
### ... to HERE to disable recreation of test db
### may have to `rm /tmp/snovault/pgdata/postmaster.pid`
postgres = postgresql_fixture.server_process(pgdata, echo=True)
elasticsearch = elasticsearch_fixture.server_process(esdata, echo=True)
nginx = nginx_server_process(echo=True)
processes = [postgres, elasticsearch, nginx]
@atexit.register
def cleanup_process():
for process in processes:
if process.poll() is None:
process.terminate()
for process in processes:
try:
for line in process.stdout:
sys.stdout.write(line.decode('utf-8'))
except IOError:
pass
process.wait()
app = get_app(args.config_uri, args.app_name)
# clear queues and initialize indices before loading data. No indexing yet.
# this is needed for items with properties stored in ES
if args.init:
create_mapping.run(app, skip_indexing=True, purge_queue=True)
if args.init and args.load:
load_test_data = app.registry.settings.get('load_test_data')
load_test_data = DottedNameResolver().resolve(load_test_data)
load_res = load_test_data(app)
if load_res: # None if successful
raise(load_res)
# now clear the queues and queue items for indexing
create_mapping.run(app, check_first=True, strict=True, purge_queue=True)
print('Started. ^C to exit.')
stdouts = [p.stdout for p in processes]
# Ugly should probably use threads instead
while True:
readable, writable, err = select.select(stdouts, [], stdouts, 5)
for stdout in readable:
for line in iter(stdout.readline, b''):
sys.stdout.write(line.decode('utf-8'))
if err:
for stdout in err:
for line in iter(stdout.readline, b''):
sys.stdout.write(line.decode('utf-8'))
break
if __name__ == '__main__':
main()
avg_line_length: 32.284672 | max_line_length: 85 | alphanum_fraction: 0.660411

Record 4
hexsha: 0bad86a6c82806ff6fc4557cf18f6ab99d3dee08 | size: 4,108 | ext: py | lang: Python
path: Server Side/login_server.py | repo: botexpert/Chatter | head: 8cd3262e38c58eae0fcbc49b3973f883a3185a48 | licenses: ["MIT"]
stars: null | issues: null | forks: 2 (2019-07-18T08:35:23.000Z to 2019-07-18T09:24:00.000Z)
content:
'''Login server for zmq client-server-client communication'''
import hashlib
import sqlite3
import sys
import threading
import time
from uuid import uuid1
import zmq
from enums_server import Host
class LoginServer(threading.Thread):
'''Login server class'''
def __init__(self):
self.database = None
self.db_name = Host.DATABASE
self.context = zmq.Context.instance()
self.login_socket = self.context.socket(zmq.REP)
threading.Thread.__init__(self, daemon=True)
# Receives requests and unpacks their data.
# Calls for a credential check and generates a token if successful
def run(self):
'''Main server program, running all functionalities'''
self.login_socket.bind("tcp://{}:{}".format(Host.ADDRESS, Host.LOGIN_PORT))
print('Login socket bound!')
self.database = sqlite3.connect(Host.DATABASE)
cursor = self.database.cursor()
cursor.execute("""CREATE TABLE IF NOT EXISTS tokens(
username TEXT UNIQUE,token TEXT UNIQUE, timestamp TEXT)""")
self.database.commit()
try:
while True:
                data = self.login_socket.recv_json() # receives username and password
username = data['username']
if self.check_credentials(data, self.database):
cursor.execute("SELECT username FROM tokens")
if any(username == value for (value,) in cursor):
cursor.execute("UPDATE tokens SET timestamp = ? WHERE username = ?",
(str(round(time.time())), username))
print('UPDATE')
cursor.execute("SELECT token FROM tokens WHERE username = ?", (username,))
(token,) = cursor.fetchone()
self.database.commit()
else:
token = str(uuid1())
cursor.execute("INSERT INTO tokens VALUES (?,?,?)",
(username, token, str(round(time.time()))))
print('NEW USER')
self.database.commit()
reply = {'try_again': False,
'token': token}
self.login_socket.send_json(reply)
else:
token = 'Not allowed'
reply = {'try_again': True,
'token': token}
self.login_socket.send_json(reply)
except (KeyboardInterrupt, SystemExit):
print('\nClosing login server...')
sys.exit(1)
except zmq.ContextTerminated:
print('\nMain server Context unavailable,closing login server...')
sys.exit(0)
# Checks the database for the username and password pair.
def check_credentials(self, data, datab) -> bool:
'''Method for checking username,password pair credibility'''
username = data['username']
password = data['password']
enc_pass = self.pass_encript(username, password)
credentials = (username, enc_pass)
print(credentials)
cursor = datab.cursor()
cursor.execute("SELECT username,password FROM users")
# users = cursor.fetchall()
# if credentials in users:
if any(credentials == pair for pair in cursor):
print('Successful login for user {}'.format(username))
return True
print('Failed login attempt. Bad username {} or password {}.'.format(username,password))
return False
@staticmethod
def pass_encript(username, password):
        '''Encryption of password'''
salt = username.encode() + password.encode()
key = hashlib.pbkdf2_hmac(
'sha256', # The hash digest algorithm for HMAC
password.encode('utf-8'), # Convert the password to bytes
salt, # Provide the salt
100000 # It is recommended to use at least 100,000 iterations of SHA-256
)
return key
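The request/reply contract of `run()` (send a JSON object with `username` and `password`, receive `try_again` and `token`) can be exercised with a small client. This is a sketch only; the address and port literals below are placeholder assumptions standing in for `Host.ADDRESS` and `Host.LOGIN_PORT`.

```python
# Sketch of a matching client; the endpoint literals are placeholder assumptions.
import zmq

context = zmq.Context.instance()
login = context.socket(zmq.REQ)
login.connect("tcp://127.0.0.1:5555")   # stand-in for Host.ADDRESS / Host.LOGIN_PORT

login.send_json({'username': 'alice', 'password': 'secret'})
reply = login.recv_json()

if reply['try_again']:
    print('Bad credentials, try again')
else:
    print('Logged in with token', reply['token'])
```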
avg_line_length: 41.918367 | max_line_length: 98 | alphanum_fraction: 0.564508

Record 5
hexsha: d898a53720eca759bc094e228504bdc94d7619fb | size: 1,970 | ext: py | lang: Python
path: test/test_response_page_metadata.py | repo: Sage-Bionetworks/rocc-client | head: 85b73afe7d4977094810c0a8094f56ebe7ed3d48 | licenses: ["Apache-2.0"]
stars: null | issues: 14 (2020-12-06T23:54:23.000Z to 2021-02-03T18:35:02.000Z) | forks: null
content:
# coding: utf-8
"""
Registry of Open Community Challenge API
The OpenAPI specification implemented by the Challenge Registries. # Introduction TBA # noqa: E501
The version of the OpenAPI document: 0.1.0
Contact: thomas.schaffter@sagebionetworks.org
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import roccclient
from roccclient.models.response_page_metadata import ResponsePageMetadata # noqa: E501
from roccclient.models.response_page_metadata_links import ResponsePageMetadataLinks # noqa: E501
from roccclient.rest import ApiException
class TestResponsePageMetadata(unittest.TestCase):
"""ResponsePageMetadata unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test ResponsePageMetadata
        include_optional is a boolean; when False only required
params are included, when True both required and
optional params are included """
# model = roccclient.models.response_page_metadata.ResponsePageMetadata() # noqa: E501
if include_optional :
return ResponsePageMetadata(
offset = 56,
limit = 56,
links = roccclient.models.response_page_metadata_links.ResponsePageMetadataLinks(
next = '0', )
)
else :
return ResponsePageMetadata(
offset = 56,
limit = 56,
links = roccclient.models.response_page_metadata_links.ResponsePageMetadataLinks(
next = '0', ),
)
def testResponsePageMetadata(self):
"""Test ResponsePageMetadata"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
avg_line_length: 31.774194 | max_line_length: 104 | alphanum_fraction: 0.671574

Record 6
hexsha: bacc360a59865d9f9b819550c3bf9b29dea9201d | size: 4,561 | ext: py | lang: Python
path: eplusplus/model/processManager.py | repo: labeee/EPlusPlus | head: da6cbd60575146a8f165fb72e165919cd83ddc24 | licenses: ["MIT"]
stars: 1 (2018-02-06T17:41:12.000Z to 2018-02-06T17:41:12.000Z) | issues: null | forks: 1 (2021-06-29T02:49:59.000Z to 2021-06-29T02:49:59.000Z)
content:
import os
import subprocess
import multiprocessing
##
## @brief Class for the process manager. This class is responsible for creating
## tasks and the processes that execute each task. Its main responsibility
## is to create a number of subprocesses equal to
## the number of cores of the current machine. After that, each
## process will receive a task to run inside EnergyPlus. Then
## it will wait for all processes to finish. If not all tasks have been
## completed, the procedure is repeated until no more tasks
## remain. The os import is necessary to list files in a directory.
## The subprocess module is used to create new subprocesses and
## multiprocessing is used to get the number of CPUs on the
## current machine.
##
class ProcessManager(object):
def __init__(self):
super(ProcessManager, self).__init__()
self.numCores = multiprocessing.cpu_count()
self.idfFiles = []
self.tasks = []
##
    ## @brief This method gets all idf files in the folder passed as
    ## argument that were created by EPlusPlus (see the
    ## "getIDFFiles" documentation for more info). After checking
    ## what the current OS is, it will iterate over each idf file,
    ## creating a formatted string that will later be interpreted
    ## as a task by the subprocesses. At the end it just appends the
    ## task to a list. Finally, it sets the attribute "tasks" to the
    ## new list that was just created.
##
## @param self Non static method.
## @param platformManager Used to check what is the current OS.
## @param pathToEpw The path to epw file.
## @param pathToFolder The path to folder that has the IDF files
## generated by the EPlusPlus.
    ## @param fileManager File manager used to filter the IDF files.
##
## @return This is a void method.
##
def createTasks(self, platformManager, pathToEpw, pathToFolder, fileManager):
self.idfFiles = fileManager.getIDFFiles(pathToFolder)
tasks = []
processes = []
if platformManager.isWindows():
for i in range(0, len(self.idfFiles)):
idfFile = self.idfFiles.pop()
absPath = str(pathToFolder) + "/" + str(idfFile)
output = absPath[:-4]
cmd = ["C:/EnergyPlusV8-7-0/energyplus.exe", "-w", pathToEpw, "-d", output, "-r", absPath]
tasks.append(cmd)
elif platformManager.isLinux():
for i in range(0, len(self.idfFiles)):
idfFile = self.idfFiles.pop()
absPath = str(pathToFolder) + "/" + str(idfFile)
output = absPath[:-4]
cmd = ["runenergyplus", "-w", pathToEpw, "-d", output, "-r", absPath]
tasks.append(cmd)
self.tasks = tasks
##
    ## @brief This method creates the tasks to be executed. After that,
    ## it will iterate in a while loop until no more tasks remain.
## In each iteration, will execute the tasks in the
## "runEplus" method (see its documentation for more info).
##
## @param self Non static method
## @param platformManager Used to check what is the current OS.
## @param pathToEpw The path to epw file.
## @param pathToFolder The path to folder that has the IDF files
## generated by the EPlusPlus.
    ## @param fileManager File manager used to filter the IDF files.
##
## @return This is a void method.
##
def executeTasks(self, platformManager, pathToEpw, pathToFolder, fileManager):
self.createTasks(platformManager, pathToEpw, pathToFolder, fileManager)
while len(self.tasks) > 0:
self.runEPlus()
##
    ## @brief This method creates a for loop that iterates as many times
    ## as there are CPUs. For each iteration, it will remove
## a task from the tasks list and create a subprocess to
## execute that task. The processes list has all processes
## created and at the end of method we iterate through this
## list so we can wait until all subprocesses finish.
##
## @param self Non static method
##
## @return This is a void method
##
def runEPlus(self):
processes = []
for i in range(0, self.numCores):
try:
task = self.tasks.pop()
except Exception as e:
continue
if task:
process = subprocess.Popen(task)
processes.append(process)
for process in processes:
process.wait()
del processes
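A minimal sketch of how this class is driven. The two stand-in objects below only mimic the interfaces used above (`isWindows`/`isLinux` and `getIDFFiles`); they are assumptions for illustration, not the real EPlusPlus managers, and the paths are placeholders.

```python
# Stand-in collaborators for illustration only; not the real EPlusPlus classes.
import os

class StubPlatformManager:
    def isWindows(self):
        return os.name == 'nt'

    def isLinux(self):
        return os.name == 'posix'

class StubFileManager:
    def getIDFFiles(self, pathToFolder):
        # pretend every .idf file in the folder was generated by EPlusPlus
        return [name for name in os.listdir(pathToFolder) if name.endswith('.idf')]

manager = ProcessManager()
manager.executeTasks(StubPlatformManager(), '/path/to/weather.epw',
                     '/path/to/idf_folder', StubFileManager())
```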
avg_line_length: 40.362832 | max_line_length: 94 | alphanum_fraction: 0.629248

Record 7
hexsha: 937a28c598a4a049f2779c0a6611a54480f0f9a8 | size: 2,663 | ext: py | lang: Python
path: samples/methodFaultsAckFaults.py | repo: noironetworks/UcsPythonSDK | head: bf6b07d6abeacb922c92b198352eda4eb9e4629b | licenses: ["Apache-2.0"]
stars: 13 (2015-01-16T15:36:56.000Z to 2019-04-12T02:48:53.000Z) | issues: 4 (2015-06-12T03:36:11.000Z to 2015-07-29T18:39:42.000Z) | forks: 13 (2015-07-01T09:17:37.000Z to 2020-03-10T03:26:25.000Z)
content:
#!/usr/bin/python
# Copyright 2013 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script acknowledge all the existing UCSM faults via UCS Manager
# method "FaultAckFaults".
# Usage: methodFaultsAckFaults.py [options]
#
# Options:
# -h, --help show this help message and exit
# -i IP, --ip=IP [Mandatory] UCSM IP Address
# -u USERNAME, --username=USERNAME
# [Mandatory] Account Username for UCSM Login
# -p PASSWORD, --password=PASSWORD
# [Mandatory] Account Password for UCSM Login
#
import getpass
import optparse
import platform
from UcsSdk import *
def getpassword(prompt):
if platform.system() == "Linux":
return getpass.unix_getpass(prompt=prompt)
elif platform.system() == "Windows" or platform.system() == "Microsoft":
return getpass.win_getpass(prompt=prompt)
else:
return getpass.getpass(prompt=prompt)
if __name__ == "__main__":
handle = UcsHandle()
try:
parser = optparse.OptionParser()
parser.add_option('-i', '--ip',dest="ip",
help="[Mandatory] UCSM IP Address")
parser.add_option('-u', '--username',dest="userName",
help="[Mandatory] Account Username for UCSM Login")
parser.add_option('-p', '--password',dest="password",
help="[Mandatory] Account Password for UCSM Login")
(options, args) = parser.parse_args()
if not options.ip:
parser.print_help()
parser.error("Provide UCSM IP Address")
if not options.userName:
parser.print_help()
parser.error("Provide UCSM UserName")
if not options.password:
options.password=getpassword("UCSM Password:")
handle.Login(options.ip,options.userName,options.password)
idSet = IdSet()
getRsp = handle.GetManagedObject(None, FaultInst.ClassId())
for mo in getRsp:
id = Id()
id.Value = mo.Id
idSet.AddChild(id)
handle.FaultAckFaults(idSet)
handle.Logout()
except Exception, err:
handle.Logout()
print "Exception:", str(err)
import traceback, sys
print '-'*60
traceback.print_exc(file=sys.stdout)
print '-'*60
avg_line_length: 31.702381 | max_line_length: 75 | alphanum_fraction: 0.668795

Record 8
hexsha: 5a5403a36b7ffed76079ab817aac7aca3bbb7b44 | size: 2,802 | ext: py | lang: Python
path: venv/Lib/site-packages/PyOpenGL-3.0.1/OpenGL/platform/glx.py | head: a8b8d4c4d2dcc9be28385600f56066cef92a38ad | licenses: ["MIT"]
stars record repo: temelkirci/Motion_Editor, stars: 1 (2022-03-02T17:07:20.000Z to 2022-03-02T17:07:20.000Z)
issues/forks record repo: temelkirci/RealTime_6DOF_Motion_Editor, issues: null, forks: null
content:
"""GLX (x-windows)-specific platform features"""
import ctypes, ctypes.util
from OpenGL.platform import baseplatform, ctypesloader
assert hasattr( ctypes, 'RTLD_GLOBAL' ), """Old ctypes without ability to load .so for global resolution: Get ctypes CVS branch_1_0, not CVS HEAD or released versions!"""
class GLXPlatform( baseplatform.BasePlatform ):
"""Posix (Linux, FreeBSD, etceteras) implementation for PyOpenGL"""
# On Linux (and, I assume, most GLX platforms, we have to load
# GL and GLU with the "global" flag to allow GLUT to resolve its
# references to GL/GLU functions).
try:
GL = OpenGL = ctypesloader.loadLibrary(
ctypes.cdll,
'GL',
mode=ctypes.RTLD_GLOBAL
)
except OSError, err:
raise ImportError("Unable to load OpenGL library", *err.args)
try:
GLU = ctypesloader.loadLibrary(
ctypes.cdll,
'GLU',
mode=ctypes.RTLD_GLOBAL
)
except OSError, err:
GLU = None
# glut shouldn't need to be global, but just in case a dependent library makes
# the same assumption GLUT does...
try:
GLUT = ctypesloader.loadLibrary(
ctypes.cdll,
'glut',
mode=ctypes.RTLD_GLOBAL
)
except OSError, err:
GLUT = None
# GLX doesn't seem to have its own loadable module?
GLX = GL
glXGetProcAddressARB = GL.glXGetProcAddressARB
glXGetProcAddressARB.restype = ctypes.c_void_p
getExtensionProcedure = staticmethod( glXGetProcAddressARB )
try:
GLE = ctypesloader.loadLibrary(
ctypes.cdll,
'gle',
mode=ctypes.RTLD_GLOBAL
)
except OSError, err:
GLE = None
DEFAULT_FUNCTION_TYPE = staticmethod( ctypes.CFUNCTYPE )
# This loads the GLX functions from the GL .so, not sure if that's
# really kosher...
GetCurrentContext = CurrentContextIsValid = staticmethod(
GL.glXGetCurrentContext
)
def getGLUTFontPointer( self, constant ):
"""Platform specific function to retrieve a GLUT font pointer
GLUTAPI void *glutBitmap9By15;
#define GLUT_BITMAP_9_BY_15 (&glutBitmap9By15)
Key here is that we want the addressof the pointer in the DLL,
not the pointer in the DLL. That is, our pointer is to the
pointer defined in the DLL, we don't want the *value* stored in
that pointer.
"""
name = [ x.title() for x in constant.split( '_' )[1:] ]
internal = 'glut' + "".join( [x.title() for x in name] )
pointer = ctypes.c_void_p.in_dll( self.GLUT, internal )
return ctypes.c_void_p(ctypes.addressof(pointer))
safeGetError = staticmethod( OpenGL.glGetError )
avg_line_length: 35.468354 | max_line_length: 170 | alphanum_fraction: 0.632762

Record 9
hexsha: 3e3cccc72664ce12761c8e6f8742db179bf80074 | size: 527 | ext: py | lang: Python
path: 096.err.py | repo: liuyang1/euler | head: ba6c79b3f809711eec07a7843ec60c86990564d1 | licenses: ["MIT"]
stars: null | issues: null | forks: null
content:
def solve(sdk):
def getSudoku():
with open('096.data') as f:
cnt = 0
lst = []
for line in f.readlines():
if cnt != 0:
line = line.strip()
line = [int(i) for i in line]
lst.append(line)
cnt += 1
if cnt == 10:
yield lst
cnt = 0
lst = []
def main():
sudoku = getSudoku()
for i in range(50):
print sudoku.next()
if __name__ == "__main__":
main()
avg_line_length: 19.518519 | max_line_length: 45 | alphanum_fraction: 0.40797

Record 10
hexsha: 948e72e843b89dd21bb5c55a2763ef4f0b5f6c3d | size: 3,084 | ext: py | lang: Python
path: facebook.py | head: 3113e5eca38db25b792da20595f3280574b3dfe2 | licenses: ["MIT"]
stars record repo: dustydevelops/aSocialWizard, stars: 1 (2020-05-03T22:59:34.000Z to 2020-05-03T22:59:34.000Z)
issues/forks record repo: dustydevelops/SocialWizard-, issues: null, forks: null
content:
# SOCIAL WIZARD #
from selenium import webdriver
import time
import sys
# User Credentials #
email = 'darealjoeshmo@gmail.com'
password = 'youllneverguessit'
tellThem = 'Happy Birthday!'
# Start Facebook Engagement #
x = 1
while True:
print('Iteration:', x)
dr = webdriver.Safari()
dr.get('https://www.facebook.com/login')
element = dr.find_elements_by_xpath('//*[@id ="email"]')
element[0].send_keys(email)
print("Username Entered")
element = dr.find_element_by_xpath('//*[@id ="pass"]')
element.send_keys(password)
print("Password Entered")
login_button = dr.find_element_by_xpath('//*[@id="loginbutton"]')
login_button.click() # click the login button
    print("Login Successful")
time.sleep(3)
dr.get('https://www.facebook.com/events/birthdays')
time.sleep(3)
try:
element = dr.find_element_by_xpath('//*[@id="u_0_12"]')
element.send_keys(tellThem)
        time.sleep(1)
element.submit()
        print("Birthday wish 1 Successful")
except:
pass
try:
element = dr.find_element_by_xpath('//*[@id="u_0_13"]')
element.send_keys(tellThem)
time.sleep(1)
element.submit()
print("a birthday wish was made")
except:
pass
try:
element = dr.find_element_by_xpath('//*[@id="u_0_16"]')
element.send_keys(tellThem)
time.sleep(1)
element.submit()
print("A birthday wish was made")
except:
pass
try:
element = dr.find_element_by_xpath('//*[@id="u_0_17"]')
element.send_keys(tellThem)
time.sleep(1)
element.submit()
print("A birthday wish was made")
except:
pass
try:
element = dr.find_element_by_xpath('//*[@id="u_0_1a"]')
element.send_keys(tellThem)
time.sleep(1)
element.submit()
print("A birthday wish was made")
except:
pass
try:
element = dr.find_element_by_xpath('//*[@id="u_0_1d"]')
element.send_keys(tellThem)
time.sleep(1)
element.submit()
print("A birthday wish was made")
except:
pass
print('Birthday post process complete')
    print('Iteration', x, 'was completed successfully.')
    print('In 2 hours the application will recheck for new/missed birthdays.', '\n',
          'Please do not power down your system ')
dr.close()
    x += 1
    time.sleep(7200)
avg_line_length: 27.535714 | max_line_length: 86 | alphanum_fraction: 0.487354

Record 11
hexsha: 251829be7f220467dd7db38998c87a64f3f6b7c8 | size: 6,384 | ext: py | lang: Python
path: moderngl_window/scene/scene.py | repo: yoyonel/moderngl-window | head: 3dc986c97802757ce9208a9bb692498f4732c5cd | licenses: ["MIT"]
stars: null | issues: null | forks: null
content:
"""
Wrapper for a loaded scene with properties.
"""
import logging
import numpy
from pyrr import matrix44, vector3
import moderngl
import moderngl_window as mglw
from moderngl_window.resources import programs
from moderngl_window.meta import ProgramDescription
from moderngl_window import geometry
from .programs import (
ColorProgram,
FallbackProgram,
MeshProgram,
TextureProgram,
)
logger = logging.getLogger(__name__)
class Scene:
"""Generic scene"""
def __init__(self, name, **kwargs):
"""Create a scene with a name.
Args:
name (str): Unique name or path for the scene
"""
self.name = name
self.root_nodes = []
# References resources in the scene
self.nodes = []
self.materials = []
self.meshes = []
self.cameras = []
self.bbox_min = None # Type: numpy.ndarray
self.bbox_max = None # Type: numpy.ndarray
self.diagonal_size = 1.0
self.bbox_vao = geometry.bbox()
self.bbox_program = programs.load(
ProgramDescription(path='scene_default/bbox.glsl'),
)
self._model_matrix = matrix44.create_identity()
@property
def ctx(self) -> moderngl.Context:
"""moderngl.Context: The current context"""
return mglw.ctx()
@property
def model_matrix(self) -> numpy.ndarray:
"""numpy.ndarray: The current model matrix
This property is settable.
"""
return self._model_matrix
@model_matrix.setter
def model_matrix(self, matrix: numpy.ndarray):
self._model_matrix = matrix.astype('f4')
for node in self.root_nodes:
node.calc_model_mat(self._model_matrix)
def draw(self, projection_matrix: numpy.ndarray = None, camera_matrix: numpy.ndarray = None, time=0.0) -> None:
"""Draw all the nodes in the scene.
Args:
projection_matrix (ndarray): projection matrix (bytes)
camera_matrix (ndarray): camera_matrix (bytes)
time (float): The current time
"""
projection_matrix = projection_matrix.astype('f4').tobytes()
camera_matrix = camera_matrix.astype('f4').tobytes()
for node in self.root_nodes:
node.draw(
projection_matrix=projection_matrix,
camera_matrix=camera_matrix,
time=time,
)
self.ctx.clear_samplers(0, 4)
def draw_bbox(self, projection_matrix=None, camera_matrix=None, children=True) -> None:
"""Draw scene and mesh bounding boxes.
Args:
projection_matrix (ndarray): mat4 projection
camera_matrix (ndarray): mat4 camera matrix
children (bool): Will draw bounding boxes for meshes as well
"""
projection_matrix = projection_matrix.astype('f4').tobytes()
camera_matrix = camera_matrix.astype('f4').tobytes()
# Scene bounding box
self.bbox_program["m_proj"].write(projection_matrix)
self.bbox_program["m_model"].write(self._model_matrix.astype('f4').tobytes())
self.bbox_program["m_cam"].write(camera_matrix)
self.bbox_program["bb_min"].write(self.bbox_min.astype('f4').tobytes())
self.bbox_program["bb_max"].write(self.bbox_max.astype('f4').tobytes())
self.bbox_program["color"].value = (1.0, 0.0, 0.0)
self.bbox_vao.render(self.bbox_program)
if not children:
return
# Draw bounding box for children
for node in self.root_nodes:
node.draw_bbox(projection_matrix, camera_matrix, self.bbox_program, self.bbox_vao)
def apply_mesh_programs(self, mesh_programs=None) -> None:
"""Applies mesh programs to meshes.
        If no mesh programs are passed in, default ones are assigned.
Args:
mesh_programs (list): List of mesh programs to assign
"""
if not mesh_programs:
mesh_programs = [ColorProgram(), TextureProgram(), FallbackProgram()]
for mesh in self.meshes:
for mesh_prog in mesh_programs:
instance = mesh_prog.apply(mesh)
if instance is not None:
if isinstance(instance, MeshProgram):
mesh.mesh_program = mesh_prog
break
else:
raise ValueError("apply() must return a MeshProgram instance, not {}".format(type(instance)))
if not mesh.mesh_program:
                logger.warning("WARNING: No mesh program applied to '%s'", mesh.name)
def calc_scene_bbox(self) -> None:
"""Calculate scene bbox"""
bbox_min, bbox_max = None, None
for node in self.root_nodes:
bbox_min, bbox_max = node.calc_global_bbox(
matrix44.create_identity(),
bbox_min,
bbox_max
)
self.bbox_min = bbox_min
self.bbox_max = bbox_max
self.diagonal_size = vector3.length(self.bbox_max - self.bbox_min)
def prepare(self) -> None:
"""prepare the scene for rendering.
Calls ``apply_mesh_programs()`` assigning default meshprograms if needed
and sets the model matrix.
"""
self.apply_mesh_programs()
# Recursively calculate model matrices
self.model_matrix = matrix44.create_identity()
def find_node(self, name: str = None):
"""Find a node
Keyword Args:
name (str): Case sensitive name
Returns:
A Node or ``None`` if not found.
"""
for node in self.nodes:
if node.name == name:
return node
return None
def find_material(self, name: str = None):
"""Finds a material
Keyword Args:
name (str): Case sensitive material name
Returns:
The material or ``None`` if not found.
"""
for mat in self.materials:
if mat.name == name:
return mat
return None
def destroy(self) -> None:
"""Destroys the scene data and vertex buffers"""
for mesh in self.meshes:
mesh.vao.release()
def __str__(self) -> str:
return "<Scene: {}>".format(self.name)
def __repr__(self) -> str:
return str(self)
avg_line_length: 31.448276 | max_line_length: 117 | alphanum_fraction: 0.600721

Record 12
hexsha: 0f7112686a9f339ab931d32e00e409d768d9e8eb | size: 1,945 | ext: py | lang: Python
path: salty/tests/test_iupac_smiles.py | repo: jameslee97/salty-1 | head: efe6e68cfcab75f8522a66f4fa8b21d2ad727df1 | licenses: ["MIT"]
stars: null | issues: null | forks: null
content:
from __future__ import absolute_import, division, print_function
import salty
from rdkit.Chem import AllChem as Chem
import unittest
import datetime
class iupac_smiles_tests(unittest.TestCase):
data_files = ["cationInfo.csv", "anionInfo.csv"]
df = salty.load_data(data_files[0])
smiles = df.smiles
for i in range(len(smiles)):
ion = smiles[i]
salty.check_name(ion)
def test_1_check_data(self):
for i in range(len(self.data_files)):
df = salty.load_data(self.data_files[i])
self.check_data(df)
    def test_2_check_wrong_ion(self):
ion = 'stupid_nonsense_string'
salty.check_name(ion)
def test_benchmark(self):
salty.Benchmark.run(self.test_1_check_data)
salty.Benchmark.run(self.test_2_check_wrong_ion)
def check_data(self, df):
startTime = datetime.datetime.now()
def fnDisplay(message):
display(message, startTime)
smiles = df.smiles
for i in range(len(smiles)):
ion = smiles[i]
try:
Chem.SanitizeMol(Chem.MolFromSmiles(ion))
except ValueError:
name = salty.check_name(ion)
message = "RDKit cannot interpret %s ion SMILES in datafile" \
% name
fnDisplay(message)
if "-" not in ion and "+" not in ion:
name = salty.check_name(ion)
message = "%s ion does not have a charge" % name
fnDisplay(message)
if "." in ion:
name = salty.check_name(ion)
message = "%s ion contains more than one molecular entity" \
% name
fnDisplay(message)
def display(message, startTime):
timeDiff = datetime.datetime.now() - startTime
print("{}\t{}".format(timeDiff, message))
if __name__ == '__main__':
unittest.main()
avg_line_length: 30.873016 | max_line_length: 78 | alphanum_fraction: 0.589203

Record 13
hexsha: 121d720eda653e4393b2ac11a75acee7a485b353 | size: 395 | ext: py | lang: Python
path: sary_exam/asgi.py | repo: youssefelmasry/sary_assessment | head: 0b0a91e11e3a7136eaf41fc0dc46314a5b88f630 | licenses: ["MIT"]
stars: 1 (2021-11-11T15:07:14.000Z to 2021-11-11T15:07:14.000Z) | issues: null | forks: null
content:
"""
ASGI config for sary_exam project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sary_exam.settings')
application = get_asgi_application()
avg_line_length: 23.235294 | max_line_length: 78 | alphanum_fraction: 0.787342

Record 14
hexsha: b51080674f3ce8db2b01341ee946a0b73eec4f23 | size: 8,063 | ext: py | lang: Python
path: vega/datasets/common/utils/auto_lane_utils.py | repo: jie311/vega | head: 1bba6100ead802697e691403b951e6652a99ccae | licenses: ["MIT"]
stars: 724 (2020-06-22T12:05:30.000Z to 2022-03-31T07:10:54.000Z) | issues: 147 (2020-06-30T13:34:46.000Z to 2022-03-29T11:30:17.000Z) | forks: 160 (2020-06-29T18:27:58.000Z to 2022-03-23T08:42:21.000Z)
content:
# -*- coding: utf-8 -*-
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the MIT License.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.
"""This script is used to process the auto lane dataset."""
import cv2
import numpy as np
import json
import PIL
import os
def hwc2chw(img):
"""Transform image from HWC to CHW format.
:param img: image to transform.
:type: ndarray
:return: transformed image
:rtype: ndarray
"""
return np.transpose(img, (2, 0, 1))
def resize_by_wh(*, img, width, height):
"""Resize image by weight and height.
:param img:image array
:type: ndarray
:param width:
:type: int
:param height:
:type: int
:return:resized image
:rtype:ndarray
"""
dim = (width, height)
# resize image
resized = cv2.resize(img, dim, interpolation=cv2.INTER_AREA)
return resized
def exif_transpose(img):
"""If an image has an Exif Orientation tag, transpose the image accordingly.
Note: Very recent versions of Pillow have an internal version
of this function. So this is only needed if Pillow isn't at the
latest version.
:param image: The image to transpose.
:type: ndarray
:return: An image.
:rtype: ndarray
"""
if not img:
return img
exif_orientation_tag = 274
# Check for EXIF data (only present on some files)
if hasattr(img, "_getexif") and isinstance(img._getexif(), dict) and exif_orientation_tag in img._getexif():
exif_data = img._getexif()
orientation = exif_data[exif_orientation_tag]
# Handle EXIF Orientation
if orientation == 1:
# Normal image - nothing to do!
pass
elif orientation == 2:
# Mirrored left to right
img = img.transpose(PIL.Image.FLIP_LEFT_RIGHT)
elif orientation == 3:
# Rotated 180 degrees
img = img.rotate(180)
elif orientation == 4:
# Mirrored top to bottom
img = img.rotate(180).transpose(PIL.Image.FLIP_LEFT_RIGHT)
elif orientation == 5:
# Mirrored along top-left diagonal
img = img.rotate(-90, expand=True).transpose(PIL.Image.FLIP_LEFT_RIGHT)
elif orientation == 6:
# Rotated 90 degrees
img = img.rotate(-90, expand=True)
elif orientation == 7:
# Mirrored along top-right diagonal
img = img.rotate(90, expand=True).transpose(PIL.Image.FLIP_LEFT_RIGHT)
elif orientation == 8:
# Rotated 270 degrees
img = img.rotate(90, expand=True)
return img
def load_image_file(file, mode='RGB'):
"""Load an image file (.jpg, .png, etc) into a numpy array.
Defaults to returning the image data as a 3-channel array of 8-bit data. That is
controlled by the mode parameter.
Supported modes:
1 (1-bit pixels, black and white, stored with one pixel per byte)
L (8-bit pixels, black and white)
RGB (3x8-bit pixels, true color)
RGBA (4x8-bit pixels, true color with transparency mask)
CMYK (4x8-bit pixels, color separation)
YCbCr (3x8-bit pixels, color video format)
I (32-bit signed integer pixels)
F (32-bit floating point pixels)
:param file: image file name or file object to load
:type: str
:param mode: format to convert the image to - 'RGB' (8-bit RGB, 3 channels), 'L' (black and white)
:type: str
:return: image contents as numpy array
:rtype: ndarray
"""
# Load the image with PIL
img = PIL.Image.open(file)
if hasattr(PIL.ImageOps, 'exif_transpose'):
        # Very recent versions of PIL can do the exif transpose internally
img = PIL.ImageOps.exif_transpose(img)
else:
# Otherwise, do the exif transpose ourselves
img = exif_transpose(img)
img = img.convert(mode)
return np.array(img)
def imread(img_path):
"""Read image from image path.
:param img_path
:type: str
:return: image array
:rtype: nd.array
"""
img_path = os.path.normpath(os.path.abspath(os.path.expanduser(img_path)))
if os.path.exists(img_path):
img = cv2.imread(img_path)
if img is not None:
return img
else:
raise IOError(img_path)
else:
raise FileNotFoundError(img_path)
def get_img_whc(img):
"""Get image whc by src image.
:param img: image to transform.
:type: ndarray
:return: image info
:rtype: dict
"""
img_shape = img.shape
if len(img_shape) == 2:
h, w = img_shape
c = 1
elif len(img_shape) == 3:
h, w, c = img_shape
else:
raise NotImplementedError()
return dict(width=w, height=h, channel=c)
def bgr2rgb(img):
"""Convert image from bgr type to rgb type.
:param img: the image to be convert
:type img: nd.array
:return: the converted image
:rtype: nd.array
"""
return cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
def load_lines(file_path):
"""Read multi-lines file to list.
:param file_path: as name is the path of target file
:type file_path: str
:return: the content of file
:rtype: list
"""
with open(file_path) as f:
target_lines = list(map(str.strip, f))
return target_lines
def load_json(file_path):
"""Load annot json.
:param file_path:file path
:type: str
:return:json content
:rtype: dict
"""
with open(file_path) as f:
target_dict = json.load(f)
return target_dict
def imagenet_normalize(*, img):
"""Normalize image.
:param img: img that need to normalize
:type img: RGB mode ndarray
:return: normalized image
:rtype: numpy.ndarray
"""
pixel_value_range = np.array([255, 255, 255])
mean = np.array([0.485, 0.456, 0.406])
std = np.array([0.229, 0.224, 0.225])
img = img / pixel_value_range
img = img - mean
img = img / std
return img
def create_train_subset(data_path):
"""Create train dataset.
:param data_path: path of data
:type data_path: str
"""
images_list_path = os.path.join(data_path, 'list', 'train.txt')
images_list = load_lines(images_list_path)
path_pairs = []
for image_path_spec in images_list:
path_pair_spec = dict(
image_path=os.path.normpath(f'{data_path}/{image_path_spec}'),
annot_path=os.path.normpath(f'{data_path}/{image_path_spec}'.replace('.jpg', '.lines.txt'))
)
path_pairs.append(path_pair_spec)
return path_pairs
def create_valid_subset(data_path):
"""Create valid dataset.
:param data_path: path of data
:type data_path: str
"""
images_list_path = os.path.join(data_path, 'list', 'val.txt')
images_list = load_lines(images_list_path)
path_pairs = []
for image_path_spec in images_list:
path_pair_spec = dict(
image_path=os.path.normpath(f'{data_path}/{image_path_spec}'),
annot_path=os.path.normpath(f'{data_path}/{image_path_spec}'.replace('.jpg', '.lines.txt'))
)
path_pairs.append(path_pair_spec)
return path_pairs
def create_test_subset(data_path):
"""Create test dataset.
:param data_path: path of data
:type data_path: str
"""
images_list_path = os.path.join(data_path, 'list', 'test.txt')
images_list = load_lines(images_list_path)
path_pairs = []
for image_path_spec in images_list:
path_pair_spec = dict(
image_path=os.path.normpath(f'{data_path}/{image_path_spec}'),
annot_path=os.path.normpath(f'{data_path}/{image_path_spec}'.replace('.jpg', '.lines.txt'))
)
path_pairs.append(path_pair_spec)
return path_pairs
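The helpers above compose into a typical single-image preprocessing chain. The sketch below strings them together; the image path and target size are placeholder assumptions.

```python
# Example composition of the helpers above; path and target size are placeholders.
img = imread('/path/to/sample.jpg')                  # BGR uint8 array
print(get_img_whc(img))                              # {'width': ..., 'height': ..., 'channel': ...}
img = bgr2rgb(img)                                   # OpenCV loads BGR; convert to RGB
img = resize_by_wh(img=img, width=512, height=288)   # keyword-only, as defined above
img = imagenet_normalize(img=img)                    # float array with ImageNet mean/std removed
img = hwc2chw(img)                                   # (C, H, W) layout for a network
print(img.shape)
```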
avg_line_length: 28.592199 | max_line_length: 112 | alphanum_fraction: 0.637232

Record 15
hexsha: bc4226c3c2120c7a2d6a6c1e5f1f3a9b74ebb247 | size: 1,038 | ext: py | lang: Python
path: pgAdmin/pgadmin4/web/pgadmin/tools/sqleditor/utils/constant_definition.py | repo: WeilerWebServices/PostgreSQL | head: ae594ed077bebbad1be3c1d95c38b7c2c2683e8c | licenses: ["PostgreSQL"]
stars: null | issues: null | forks: null
content:
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
"""Definition of constants for SQLEditor."""
from flask_babelex import gettext
# Async Constants
ASYNC_OK = 1
ASYNC_READ_TIMEOUT = 2
ASYNC_WRITE_TIMEOUT = 3
ASYNC_NOT_CONNECTED = 4
ASYNC_EXECUTION_ABORTED = 5
# Transaction status constants
TX_STATUS_IDLE = 0
TX_STATUS__ACTIVE = 1
TX_STATUS_INTRANS = 2
TX_STATUS_INERROR = 3
# Connection status codes mapping
CONNECTION_STATUS_MESSAGE_MAPPING = dict({
0: gettext('The session is idle and there is no current transaction.'),
1: gettext('A command is currently in progress.'),
2: gettext('The session is idle in a valid transaction block.'),
3: gettext('The session is idle in a failed transaction block.'),
4: gettext('The connection with the server is bad.')
})
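A short illustrative lookup of the mapping above; the status code used here is an arbitrary example value, not something produced by this module.

```python
# Illustrative lookup only; 2 is an example status code (same value as TX_STATUS_INTRANS above).
print(CONNECTION_STATUS_MESSAGE_MAPPING[2])
# -> 'The session is idle in a valid transaction block.'
```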
avg_line_length: 30.529412 | max_line_length: 75 | alphanum_fraction: 0.644509

Record 16
hexsha: d83e4d7a14d2c01df3becfa2cd29a4639fe07978 | size: 834 | ext: py | lang: Python
path: tutorials/W0D3_LinearAlgebra/solutions/W0D3_Tutorial3_Solution_2993f27c.py | licenses: ["CC-BY-4.0", "BSD-3-Clause"]
stars record repo: sjbabdi/course-content, head: 801890b0460ceb26b34f3ee784f4af26dacd74a5, stars: 1 (2021-06-09T09:56:21.000Z to 2021-06-09T09:56:21.000Z)
issues record repo: macasal/course-content, head: 0fc5e1a0d736c6b0391eeab587012ed0ab01e462, issues: 1 (2021-06-16T05:41:08.000Z to 2021-06-16T05:41:08.000Z)
forks record repo: macasal/course-content, head: 0fc5e1a0d736c6b0391eeab587012ed0ab01e462, forks: null
content:
# 1) If both eigenvalues are above 1, the neural activity will eventually explode
#. to infinity or negative infinity, depending on initial conditions.
# 2) If both eigenvalues are below 1, the neural activity will eventually decay to 0.
# 3) The activities will explode to positive or negative infinity, but the exact trajectory
#. is drawn towards the eigenvector with the larger eigenvalue. This is because the larger eigenvalue
#. will increasingly dominate the other one as it is raised to increasingly larger powers.
# 4) The activities will eventually explode to positive or negative infinity, unless
#. the initial condition lies exactly on the eigenvector with the small eigenvalue. If the
#. initial condition is near to that eigenvector, the trajectory will first go towards
#. the origin before exploding.
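A small numerical check of these claims; the matrix below is an assumption chosen so that one eigenvalue is above 1 and one is below 1, which reproduces the behaviour described in points 3 and 4.

```python
# Iterate x_{t+1} = A @ x_t and watch the trajectory align (up to sign)
# with the eigenvector of the largest eigenvalue.
import numpy as np

A = np.array([[1.1, 0.2],
              [0.1, 0.8]])          # eigenvalues ~1.16 and ~0.74
evals, evecs = np.linalg.eig(A)
print("eigenvalues:", evals)

x = np.array([1.0, 1.0])            # generic initial condition
for _ in range(20):
    x = A @ x

print("direction after 20 steps:", x / np.linalg.norm(x))
print("dominant eigenvector:    ", evecs[:, np.argmax(np.abs(evals))])
```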
avg_line_length: 59.571429 | max_line_length: 103 | alphanum_fraction: 0.782974

Record 17
hexsha: 2be5dd0b4254a8cbcb61684991e87c52c4840090 | size: 624 | ext: py | lang: Python
path: izone/celery.py | repo: ltfred/site | head: f640a627599a4d0cc1e91867e314e1212727c81f | licenses: ["MIT"]
stars: 1 (2020-10-21T02:06:32.000Z to 2020-10-21T02:06:32.000Z) | issues: null | forks: null
content:
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
from celery.schedules import crontab
from django.conf import settings
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "izone.settings")
app = Celery("izone", broker="redis://localhost:6379/0", backend="redis://localhost")
app.config_from_object("django.conf:settings", namespace="CELERY")
# The settings below configure the beat scheduler.
app.conf.beat_schedule = {
    "reset": {
        "task": "apiv2.tasks.reset_token_visit_count",
        # minute=0 makes this fire once at midnight; crontab(hour=0) on its own would
        # run the task every minute between 00:00 and 00:59.
        "schedule": crontab(hour=0, minute=0),  # reset the visit count daily at midnight
    },
}
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
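# Usage sketch (shell commands, not part of this module): with Redis running, start a worker
# and the beat scheduler so the periodic task above is actually dispatched. This assumes the
# usual izone/__init__.py exposes the Celery app created here.
#
#   celery -A izone worker --loglevel=info
#   celery -A izone beat --loglevel=info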
| 27.130435
| 85
| 0.753205
|
3a8b3c460903e12665a2804d3399b645ea7574d3
| 300
|
py
|
Python
|
codigo/Live88/cat_3.py
|
cassiasamp/live-de-python
|
00b5e51793097544ba9b75c97a0d30e63970bf45
|
[
"MIT"
] | 572
|
2018-04-03T03:17:08.000Z
|
2022-03-31T19:05:32.000Z
|
codigo/Live88/cat_3.py
|
cassiasamp/live-de-python
|
00b5e51793097544ba9b75c97a0d30e63970bf45
|
[
"MIT"
] | 176
|
2018-05-18T15:56:16.000Z
|
2022-03-28T20:39:07.000Z
|
codigo/Live88/cat_3.py
|
cassiasamp/live-de-python
|
00b5e51793097544ba9b75c97a0d30e63970bf45
|
[
"MIT"
] | 140
|
2018-04-18T13:59:11.000Z
|
2022-03-29T00:43:49.000Z
|
import click
@click.command()
@click.argument('file', type=click.File('r'))
@click.argument('lines', type=click.BOOL)
def cat(file, lines):
if not lines:
click.echo(file.read())
else:
for n, line in enumerate(file.readlines()):
click.echo(f'{n} {line}')
cat()
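# Usage sketch (assuming the file is saved as cat_3.py; click.BOOL accepts strings such as
# "true"/"false", "yes"/"no" or "1"/"0" for the second argument):
#   python cat_3.py somefile.txt false   # print the file as-is
#   python cat_3.py somefile.txt true    # prefix each line with its index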
| 18.75
| 51
| 0.603333
|
324c362a22e58169cd0dd4c5d8becabe8ae409be
| 2,675
|
py
|
Python
|
tensorflow/python/keras/saving/saved_model/json_utils_test.py
|
TL-Rubick/tensorflow
|
6cf1ccf6060a95aad3ccc84544d0aa166990ec72
|
[
"Apache-2.0"
] | 11
|
2018-01-03T15:11:09.000Z
|
2021-04-13T05:47:27.000Z
|
tensorflow/python/keras/saving/saved_model/json_utils_test.py
|
TL-Rubick/tensorflow
|
6cf1ccf6060a95aad3ccc84544d0aa166990ec72
|
[
"Apache-2.0"
] | 3
|
2021-08-25T15:06:34.000Z
|
2022-02-10T02:50:24.000Z
|
tensorflow/python/keras/saving/saved_model/json_utils_test.py
|
TL-Rubick/tensorflow
|
6cf1ccf6060a95aad3ccc84544d0aa166990ec72
|
[
"Apache-2.0"
] | 10
|
2018-07-31T10:56:21.000Z
|
2019-10-07T08:05:21.000Z
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=protected-access
"""Tests the JSON encoder and decoder."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_spec
from tensorflow.python.keras.saving.saved_model import json_utils
from tensorflow.python.platform import test
class JsonUtilsTest(test.TestCase):
def test_encode_decode_tensor_shape(self):
metadata = {
'key1': tensor_shape.TensorShape(None),
'key2': [tensor_shape.TensorShape([None]),
tensor_shape.TensorShape([3, None, 5])]}
string = json_utils.Encoder().encode(metadata)
loaded = json_utils.decode(string)
self.assertEqual(set(loaded.keys()), {'key1', 'key2'})
self.assertAllEqual(loaded['key1'].rank, None)
self.assertAllEqual(loaded['key2'][0].as_list(), [None])
self.assertAllEqual(loaded['key2'][1].as_list(), [3, None, 5])
def test_encode_decode_tuple(self):
metadata = {
'key1': (3, 5),
'key2': [(1, (3, 4)), (1,)]}
string = json_utils.Encoder().encode(metadata)
loaded = json_utils.decode(string)
self.assertEqual(set(loaded.keys()), {'key1', 'key2'})
self.assertAllEqual(loaded['key1'], (3, 5))
self.assertAllEqual(loaded['key2'], [(1, (3, 4)), (1,)])
def test_encode_decode_type_spec(self):
spec = tensor_spec.TensorSpec((1, 5), dtypes.float32)
string = json_utils.Encoder().encode(spec)
loaded = json_utils.decode(string)
self.assertEqual(spec, loaded)
invalid_type_spec = {'class_name': 'TypeSpec', 'type_spec': 'Invalid Type',
'serialized': None}
string = json_utils.Encoder().encode(invalid_type_spec)
with self.assertRaisesRegexp(ValueError, 'No TypeSpec has been registered'):
loaded = json_utils.decode(string)
if __name__ == '__main__':
test.main()
| 38.214286
| 80
| 0.68972
|
ff7b22ac5c7126564ff5fc55544a2cb4839e1808
| 581
|
py
|
Python
|
dataactcore/migrations/versions/38d8383270c1_merge_4bbc4_and_ae35b.py
|
brianherman/data-act-broker-backend
|
80eb055b9d245046192f7ad4fd0be7d0e11d2dec
|
[
"CC0-1.0"
] | 1
|
2019-06-22T21:53:16.000Z
|
2019-06-22T21:53:16.000Z
|
dataactcore/migrations/versions/38d8383270c1_merge_4bbc4_and_ae35b.py
|
brianherman/data-act-broker-backend
|
80eb055b9d245046192f7ad4fd0be7d0e11d2dec
|
[
"CC0-1.0"
] | 3
|
2021-08-22T11:47:45.000Z
|
2022-03-29T22:06:49.000Z
|
dataactcore/migrations/versions/38d8383270c1_merge_4bbc4_and_ae35b.py
|
brianherman/data-act-broker-backend
|
80eb055b9d245046192f7ad4fd0be7d0e11d2dec
|
[
"CC0-1.0"
] | 1
|
2020-07-17T23:50:56.000Z
|
2020-07-17T23:50:56.000Z
|
"""Merge 4bbc47f2b48d and ae35bd44ec9a
Revision ID: 38d8383270c1
Revises: 4bbc47f2b48d, ae35bd44ec9a
Create Date: 2018-10-19 10:07:46.992062
"""
# revision identifiers, used by Alembic.
revision = '38d8383270c1'
down_revision = ('4bbc47f2b48d', 'ae35bd44ec9a')
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_data_broker():
pass
def downgrade_data_broker():
pass
| 15.702703
| 48
| 0.736661
|
7a6ce917881a195394f2f6210a49285f54d48742
| 23,888
|
py
|
Python
|
ffclassifier.py
|
dnm1977/python-sparse-category-indices
|
c92a29fc53ef286514be9435031debe222be6340
|
[
"Apache-2.0"
] | null | null | null |
ffclassifier.py
|
dnm1977/python-sparse-category-indices
|
c92a29fc53ef286514be9435031debe222be6340
|
[
"Apache-2.0"
] | null | null | null |
ffclassifier.py
|
dnm1977/python-sparse-category-indices
|
c92a29fc53ef286514be9435031debe222be6340
|
[
"Apache-2.0"
] | null | null | null |
"""
A classifier using the Feature Focus model from Madani and Connor (2007,2008).
Authors: Dennis N. Mehay and Chris H. Brew
"""
import sys, os, time
import doctest
from optparse import OptionParser
import StringIO, codecs
# try to import profiler. will crash if not there and profile option is set below.
try:
import hotshot,hotshot.stats
except:
pass
import bisect
#from collections import defaultdict
def recallAtK(cls, ranking, k):
"""
This function takes a class label ('cls'), a ranked list 'ranking' of (class,score)
pairs (ranked by virtue of being sorted in descending order of score) and
a parameter 'k', and returns 1 if 'cls' is in 'ranking' in the top 'k' ranked
classes. The word 'recall' is for conformity with IR terminology. Think of this
as an indicator function saying whether 'cls' is "recalled" in the top 'k' classes
of 'ranking'.
@param cls: [the string name of the class to search for in the top k ranked classes in
'ranking'.]
@param ranking: [the ranked list of (class,score) pairs.]
@param k: [the number of top ranked classes to search in for 'cls'.]
@return: [1 if 'cls' is in the top 'k' slots of 'ranking' and 0 otherwise.]
>>> ranking = [('a',1.5), ('b',0.45), ('c',0.22), ('d',0.1)]
>>> recallAtK('c',ranking,1)
0
>>> recallAtK('c',ranking,2)
0
>>> recallAtK('c',ranking,3)
1
>>> recallAtK('c',ranking,4)
1
>>> recallAtK('a',ranking,1)
1
>>> recallAtK('a',ranking,4)
1
"""
indicator = 0
rank = 0
for (cl,score) in ranking:
rank += 1
if rank <= k:
if (cl == cls):
indicator = 1
break
else:
break
return indicator
def recipRank(cls, ranking):
"""
This function takes a class and returns its reciprocal rank score in the
sorted association list of scores [(cls1,score), ..., (clsN,score)] 'ranking'
(sorted in descending order by score).
Note that the reciprocal rank for classes not in the ranked list of scores is
assumed to be 0.0.
@param cls: [the class in question.]
@param ranking: [the ranked list of scores [(cls1,score), ..., (clsN,score).]]
@return: [the reciprocal rank of 'cls'.]
>>> ranking = [('a',1.5), ('b',0.45), ('c',0.22), ('d',0.1)]
>>> '%0.2f' % recipRank('c',ranking)
'0.33'
>>> '%0.1f' % recipRank('a',ranking)
'1.0'
>>> '%0.1f' % recipRank('b',ranking)
'0.5'
"""
rr = 0.0
rank = 0
for i in ranking:
rank += 1
if cls == i[0]:
rr = 1./rank
break
return rr
def by_descending_count(pair):
"""
This function acts as a key for sort.
It helps sort pairs of (key,count) into descending order of count
>>> x = [("c",1),("a",2),("b",0)]
>>> x.sort(key=by_descending_count)
>>> x
[('a', 2), ('c', 1), ('b', 0)]
"""
return -pair[1]
class FeatureFocusModel(dict):
"""
A classifier using the FeatureFocus model from Madani and Connor (2007,2008).
"""
slots = ["_dmax","_margin","_wmin"]
def __init__(self, dmax=25, margin=0.5, wmin=0.01,items=[]):
"""
Initialize the classifer
>>> FeatureFocusModel(10,0.2,0.1,[("a","b")])
<FeatureFocusModel10,0.200000,0.100000,[('a', 'b')])
>>> FeatureFocusModel(10,0.2,0.1)
<FeatureFocusModel10,0.200000,0.100000,[])
@param dmax: [how many classes we will let each feature contribute to the scoring of.]
@param margin: [how close to error driven updates are we (0.0 is error driven and 1.0 is
always update, while 0.5, 0.2, etc. fall somewhere in between).]
@param wmin: [what is the minimum proportion that a feature can predict a class with and
still be kept around -- 0.0 means keep all feature-class associations around,
while 1.0 means keep (nearly?) none.]
"""
self._dmax = dmax
self._margin = margin
self._wmin = wmin
if items:
for (k,v) in items:
self[k] = v
def __repr__(self):
"""
>>> FeatureFocusModel(10,0.2,0.1)
<FeatureFocusModel10,0.200000,0.100000,[])
"""
return "<FeatureFocusModel%d,%f,%f,%r)" % (
self._dmax,self._margin,self._wmin,
self.items()
)
def getClasses(self, f, inputact=1):
"""
Retrieve all dmax classes for feature f as an association list of pairs
@param f: [a FeatureFocus]
@param inputact: [how much has feature f been activated?]
@return: [a list of pairs of classes and association strengths for feature f]
"""
return f.getClasses(inputact, self._dmax)
def predict(self, classContextList, boolean=True):
cl = classContextList[0]
ctxt = self.makeContext(classContextList[1:], boolean)
ranking = self.rankedRetrieval(ctxt)
return ranking
def learnFrom(self, classContextList, boolean=True):
"""
Process an instance. May trigger an update of the model if margin
is insufficient.
>>> ffm = FeatureFocusModel(2,0.0,0.1)
>>> ccl = ['a','f1','f2','f3']
>>> ranking = ffm.learnFrom(ccl)
>>> ranking
[]
>>> ccl = ['b', 'f1','f4']
>>> ranking = ffm.learnFrom(ccl)
>>> len(ranking) == 1
True
>>> ranking[0][0]
'a'
@param classContextList: [a list of [class, feat1, feat2, ...] representing a learning instance]
@return: a ranking (pre-learning) over classes [(class,score), ..., (class, score)].
"""
cl = classContextList[0]
ctxt = self.makeContext(classContextList[1:],boolean)
ranking = self.rankedRetrieval(ctxt)
marg = self.computeMargin(cl, ranking)
# following code handles update of active features
# if the margin is insufficient
if marg <= self._margin:
for f in ctxt:
# look up the class associations for each active feature of this
# context and potentially strengthen the weight btwn it and the current
# class.
try:
ffeat = self[f]
except KeyError:
ffeat = FeatureFocus()
ffeat.update(cl, ctxt[f], self._wmin)
self[f] = ffeat
# return the ranking that was calculated prior to
# any updating that may have occurred
return ranking
def rankedRetrieval(self, context):
"""
Returns a ranking over classes as list of pairs (class,score) in descending order
of score.
@param context: [the active features that 'fire' in this context]
@return: [as described]
>>> ffm = FeatureFocusModel(2,0.2,0.1)
>>> rk = ffm.learnFrom(['a','f1','f2','f3'])
>>> rk = ffm.learnFrom(['b','f1','f2','f4'])
>>> ctxt = ffm.makeContext(['f1','f4'])
>>> ranking = ffm.rankedRetrieval(ctxt)
>>> # build up a ranking with string-based repr's of scores.
>>> str_ranking = []
>>> i = 0
>>> while i < len(ranking): str_ranking.append( (ranking[i][0], ('%0.1f' % ranking[i][1])) ); i += 1
>>> str_ranking
[('b', '0.2'), ('a', '0.1')]
"""
ranking = {}
for f in context:
fetch = self.get(f)
# only update class ranking for features already seen
if not(fetch is None):
for (c,w) in self.getClasses(fetch, context[f]):
# update the score for class c
ranking[c] = ranking.get(c,0.0) + w
res = [(k,ranking[k]) for k in ranking]
# sort descending
res.sort(key=by_descending_count)
return res
def makeContext(self, str_ls, boolean=True, splitter=':'):
"""
Turns a string-list repr of a context in to a Context object.
>>> ffl = FeatureFocusModel(2,0.1,0.1);
>>> ctxt_str = ['f1','f1','f2','f3','f1','f2'];
>>> ffl.makeContext(ctxt_str)
{'f1': 3, 'f2': 2, 'f3': 1}
>>> ffl = FeatureFocusModel(2,0.1,0.1);
>>> ctxt_str = ['f1:1.0','f1:0.8','f2:0.3','f3:0.9','f1:1.0','f2:0.7'];
>>> d = ffl.makeContext(ctxt_str, False);
>>> for k,v in d.items(): d[k] = '%0.1f' %d[k]
>>> d
{'f1': '2.8', 'f2': '1.0', 'f3': '0.9'}
"""
ctxt = Context()
if boolean:
for f in str_ls:
ctxt.addBoolean(f)
return ctxt
else:
for f in str_ls:
split = f.rfind(splitter)
# oops. no colons.
if split == -1:
                    raise ValueError("No colons in alleged weighted list")
else:
name = f[:split]
val = float(f[split+1:])
ctxt.addWeighted(name, val)
return ctxt
def computeMargin(self, correct, ranking):
"""
Compute the margin of error given the correct class and a ranking over
classes.
>>> ffl = FeatureFocusModel(2,0.1,0.1);
>>> ranking = [('a',0.3), ('b',0.2), ('c',0.1)];
>>> correct = 'a';
>>> "%0.2f" %ffl.computeMargin(correct, ranking)
'0.10'
@param correct: [a string giving the correct class.]
@param ranking: [a list of pairs (class, score) sorted in descending order of score.]
@return: [a real-valued margin of correctness that is (score_correct) - (score_highest_incorrect)
(note that it can be 0.0 or negative).]
"""
if ranking == []:
return 0.0
if len(ranking) == 1 and ranking[0][0]==correct:
return ranking[0][1]
correct_score = None
wrong_score = None
# go through the list until we find the score of the correct
# category and that of the higest-ranked negative category (if
# they're in there).
i = 0
while i < len(ranking) and ((correct_score is None) or (wrong_score is None)):
(c,s) = ranking[i]
if (correct_score is None) and (c == correct):
correct_score = s
elif wrong_score is None:
wrong_score = s
i += 1
if correct_score is None:
correct_score = 0.0
if wrong_score is None:
wrong_score = 0.0
return correct_score - wrong_score
def writeToFile(self, fname):
"""
Write the current model parameters to file:
@param fname: the name of the file to write the model to.
@return: [None].
"""
f = codecs.open(fname, "wb", "utf-8")
try:
print >> f, self._dmax, self._margin, self._wmin
# the order in which items are presented in a dictionary
# is not determined, but we want order to be definite so
# that we can check file identity with diff,
# so sort the items.
its = self.items()
its.sort()
for feat_name, ffmap in its:
ffits = ffmap.items()
# if there are no items, we don't want to bother printing
if ffits:
# same reasoning as above: make item order canonical
ffits.sort()
print >> f, feat_name, ffmap._wtotal,
for classname,assoc in ffits:
print >> f, classname+':'+str(assoc),
f.write("\n")
finally:
f.close()
def readFromFile(self, fname):
self.clear() # I'm a kind of dictionary, and my contents are to be overwritten
f = codecs.open(fname, "rb", "utf-8")
try:
(dmax,margin,wmin) = f.readline().split()
self._dmax = int(dmax)
self._margin = float(margin)
self._wmin = float(wmin)
for line in f:
x = line.split()
fname = x[0]
wtotal = float(x[1])
ff = FeatureFocus()
for kv in x[2:]:
# fixed case where there was a colon in a substring of 'k' in the string 'kv' to be split.
splitpt = kv.rfind(":")
k = kv[:splitpt]
v = kv[splitpt+1:]
# with real-valued feature activation, 'v' can be a float.
ff[k] = float(v)
ff._wtotal = wtotal
self[fname] = ff
finally:
f.close()
class Context(dict):
"""
A Context is just a dictionary with two extra methods. This is a new-style
Python class, because it inherits from dict.
"""
__slots__ = []
def addBoolean(self, f):
"""
@param f: [the boolean feature that is activated.]
@return: [None.]
>>> ctx = Context()
>>> ctx.addBoolean("tom")
>>> ctx
{'tom': 1}
"""
self[f] = (self.get(f,0) + 1)
def addWeighted(self, f, w):
"""
@param f: [the real-valued feature that is activated.]
@param w: [the real value of the feature.]
@return: [None.]
>>> ctx = Context()
>>> ctx.addWeighted("jerry",2.0)
>>> ctx
{'jerry': 2.0}
"""
self[f] = (self.get(f,0.0) + w)
class FeatureFocus(dict):
"""
A 'smart' container that has a mapping from classes to association weights.
this container can absorb updated associations, adjust its internals and drop
those associations that fall below min. This is a new-style extension of dict.
Try to adjust this to use two arrays rather than a dictionary.
"""
__slots__ = ['_wtotal']
def __init__(self):
self._wtotal = 0.0
def __str__(self):
"""
@return: [a string representation of this FeatureFocus object.]
"""
res = "wtot: "+str(self._wtotal)+"\n"
for w in self.items():
res += " "+str(w)+"\n"
return res.rstrip()
def update(self, cls, inputact, wmin):
"""
@param cls: [the class that was observed with input activation 'inputact' (1 for boolean features)].
@param inputact: [the input activation strength of the feature.]
@param wmin: [the minimum proportion that a feature can predict this class and still be kept
in this focus set.]
@return: None
>>> wmin = 0.10
>>> ff = FeatureFocus()
>>> ff.update('a',1,wmin)
>>> ff.update('b',1,wmin)
>>> cs = ff.getClasses()
>>> i = 0;
>>> str_cls = []
>>> # turn each score into a string, formatted just so.
>>> while i < len(cs): str_cls.append( (cs[i][0],('%0.1f' % cs[i][1])) ); i+=1;
>>> str_cls
[('a', '0.1'), ('b', '0.1')]
>>> ff.update('a',17,wmin)
>>> ff.update('b',1,wmin)
>>> cls = ff.getClasses()
>>> len(cls)
1
>>> cls[0][0]
'a'
>>> ff._wtotal
20.0
"""
self[cls] = self.get(cls,0.0) + inputact
self._wtotal += inputact
# update by computing new proportions (ratios) and dropping weights whose
# proportions are now less than wmin
tot = self._wtotal
for c in self.keys():
proport = (self[c]/tot)
if proport < wmin:
del(self[c])
def getClasses(self, inputact=1.0, dmax=None):
"""
@param inputact: [the input activation of this feature (self).]
@param dmax: [the maximum number of categories we are interested in retrieving (all if None).]
@return: [an association list of classes to weights in ranked (descending) order.]
>>> f = FeatureFocus()
>>> wmin = 0.1
>>> f.update('a',2,wmin)
>>> f.update('b',3,wmin)
>>> cls = f.getClasses()
>>> cls[0][0]
'b'
>>> '%0.2f' % cls[0][1]
'0.30'
>>> cls[1][0]
'a'
>>> '%0.2f' % cls[1][1]
'0.20'
>>> # Remember that there is downweighting.
"""
if len(self) <= dmax:
dmax = None
res = []
tot = float(self._wtotal)
# downweight features seen < 10 times
if tot < 10:
tot = 10.0
# rank classes by their proportional weights times the activation strength
# of self (the feature).
res = [(c, inputact * (self[c]/tot)) for c in self]
res.sort(key=by_descending_count)
if dmax is None:
return res
else:
return res[:dmax]
def t(args):
op = OptionParser()
op.add_option("-p", "--profile", action="store_true", help="Whether or not to profile [default = False]", default=False)
op.add_option("-f", "--inputf", type="string", \
help="The input file of 'class feat1 feat2 feat3 ...' lines\n"+\
" To be used either for training or testing.")
op.add_option("-m", "--modelf", type="string", \
help="The location of the (already-trained or to-be-trained) text model file.")
op.add_option("-t", "--train", action="store_true", help="Whether or not to train [default = False]", default=False)
op.add_option("-T", "--test", action="store_true", help="Whether or not to test [default = False]", default=False)
op.add_option("-i", "--iterations", type="int", help="How many iterations to train (if training).", default=1)
op.add_option("--dmax", type="int", help="The maximum number of classes each feature can predict [default = 25].", default=25)
op.add_option("--wmin", type="float", help="The minimum proportion of the total focus feature group weight\n"+\
"that a class can be given by a feature [default = 0.01].", default=0.01)
op.add_option("--dmarg", type="float", help="The margin by which the ff classifier must predict (one of) the\n"+\
"correct cat's without update being triggered [default = 0.5].", default=0.5)
op.add_option("-b", "--boolean", action="store_true", help="Signals whether the context has Boolean (as opposed to real-valued)\n"+\
"activation in the input features.",default=False)
op.add_option("-r", "--real_valued", action="store_true", help="Signals whether the context has real-valued (as opposed to Boolean)\n"+\
"activation in the input features.",\
default=False)
(options, args) = op.parse_args(args)
if options.profile:
prof = hotshot.Profile("classify.prof")
prof.runcall(s,options,args)
prof.close()
stats = hotshot.stats.load("classify.prof")
stats.strip_dirs()
stats.sort_stats('time','calls')
stats.print_stats(20)
else:
s(options,args)
def s(options,args):
assert(not(options.real_valued and options.boolean) \
and (options.real_valued or options.boolean))
if options.test:
assert(not options.train)
assert(options.inputf and os.path.exists(options.inputf))
assert(options.modelf and os.path.exists(options.modelf))
FFM = FeatureFocusModel()
print >> sys.stderr, "Loading model %s ..." % options.modelf
sys.stderr.flush()
FFM.readFromFile(options.modelf)
print >> sys.stderr, "Testing on data set %s: " % (options.inputf)
sys.stderr.flush()
# did the following to make sure can write file back out with no loss
# FFM.writeToFile("checkup.model")
f = open(options.inputf,"r")
# For IR-style stat's
MRR_tot = 0.0
R_1_tot = 0.0
R_5_tot = 0.0
tot_correct = 0.0
R_5_tot = 0.0
tot_correct = 0.0
tot_instances = 0
starttime = 0.0
booleanFeats = None
if options.real_valued:
booleanFeats = False
else:
booleanFeats = True
try:
starttime = time.time()
for l in f:
tot_instances += 1
l = l.strip().split()
ranking = FFM.predict(l, booleanFeats)
MRR_tot += recipRank(l[0], ranking)
tot_correct += recallAtK(l[0], ranking, 1)
R_5_tot += recallAtK(l[0], ranking, 5)
if tot_instances % 50000 == 0:
print >> sys.stderr, "running ave. at instance no. %d" % tot_instances, tot_correct/tot_instances,tot_instances
endtime = time.time()
finally:
f.close()
print "correct answer is top guess", tot_correct/tot_instances
print "correct answer in top 5 guesses",R_5_tot/tot_instances
print "MRR",MRR_tot/tot_instances
print "total time in seconds (after loading model)", endtime - starttime
print "total no. of instances", tot_instances
return
if not(options.train) and not(options.inputf and os.path.exists(options.inputf)):
print >> sys.stderr, "Feature input file %s does not exist." % (options.inputf)
return
if options.train:
FFM = FeatureFocusModel(options.dmax, options.dmarg, options.wmin, [])
starttime = time.time()
for loop in range(options.iterations):
print "Training on data set %s: iteration %s" % (options.inputf, str(loop+1))
sys.stdout.flush()
f = open(options.inputf, 'r')
# For IR-style stat's
MRR_tot = 0.0
R_1_tot = 0.0
R_5_tot = 0.0
tot_instances = 0
booleanFeats = None
if options.real_valued:
booleanFeats = False
else:
booleanFeats = True
try:
for l in f:
tot_instances += 1
l = l.strip().split()
ranking = FFM.learnFrom(l, booleanFeats)
MRR_tot += recipRank(l[0], ranking)
R_1_tot += recallAtK(l[0], ranking, 1)
R_5_tot += recallAtK(l[0], ranking, 5)
message_center = ""
if options.train:
message_center = "for iteration %s" % (str(loop+1))
else:
# testing; talk of 'iterations' is meaningless.
message_center = "is"
print "MRR "+message_center+": "+str(MRR_tot/tot_instances)
print "R-1 recall "+message_center+": "+str(R_1_tot/tot_instances)
print "R-5 recall "+message_center+": "+str(R_5_tot/tot_instances)
finally:
f.close()
FFM.writeToFile(options.modelf)
endtime = time.time()
print "total time in seconds %0.1d" % (endtime-starttime)
print "ave. per-iteration time in seconds %0.1d" % ((endtime-starttime)/options.iterations)
if __name__=="__main__":
doctest.testmod()
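# A minimal usage sketch (not part of the original module): train a FeatureFocusModel on a
# few boolean-feature instances of the form [class, feat1, feat2, ...] and rank classes for
# a fresh context. The instances below are made up purely for illustration.
def _demo():
    ffm = FeatureFocusModel(dmax=25, margin=0.5, wmin=0.01)
    training = [['spam', 'buy', 'now', 'cheap'],
                ['ham', 'meeting', 'tomorrow'],
                ['spam', 'cheap', 'pills']]
    for instance in training:
        ffm.learnFrom(instance, boolean=True)
    # predict() also expects a class label in slot 0; it is ignored when ranking.
    ranking = ffm.predict(['?', 'cheap', 'now'], boolean=True)
    return ranking  # list of (class, score) pairs in descending order of score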
| 34.97511
| 140
| 0.526247
|
57e58a1b35b05b61516d3c215cd82033a69f4fdf
| 2,725
|
py
|
Python
|
controller.py
|
pyton4ik/RobotCook
|
d557d348b2f5cc12536d895050c82f4558194fb5
|
[
"Apache-2.0"
] | null | null | null |
controller.py
|
pyton4ik/RobotCook
|
d557d348b2f5cc12536d895050c82f4558194fb5
|
[
"Apache-2.0"
] | null | null | null |
controller.py
|
pyton4ik/RobotCook
|
d557d348b2f5cc12536d895050c82f4558194fb5
|
[
"Apache-2.0"
] | null | null | null |
"""
Controller helpers for products, orders and recipes: database access plus cooking dispatch.
"""
from sqlalchemy.orm import Session
from models import Order, OrderItems, Product, Receipt
from chef import Recipe
from errors import ProductNotFound
def get_product(db: Session, product_id):
res = db.query(Product).filter(Product.id == product_id).first()
if res is None:
raise ProductNotFound(product=product_id)
return res
def get_products_list(db: Session):
return db.query(Product).filter(Product.active == True).all()
def create_product_order(db: Session, **kwargs):
    """Create a product order in the DB without cooking it."""
curr_kwargs = kwargs.copy()
items = curr_kwargs.pop("order_items")
db_order = Order(**curr_kwargs)
db.add(db_order)
db.commit()
for item in items:
item = dict(item)
product_obj = db.query(Product).filter(Product.id == item.get("product_id")).first()
db_order_item = OrderItems(order_id=db_order.id,
product_id=product_obj.id,
qty=item.get("qty"),
price=product_obj.price)
db.add(db_order_item)
db.commit()
db.commit()
db.refresh(db_order)
return db_order
def get_order_obj(db: Session, order_id: int):
"""
:param db: Alchemy Session
:param order_id: order ID
:return: Alchemy Order object
"""
return db.query(Order).filter(Order.id == order_id).first()
def get_receipt_item(db: Session, product_id: int):
"""
:param db: Alchemy Session
:param product_id: Product ID
    :return: list of dicts with keys (ingredient, operation, time)
"""
receipt_objs = db.query(Receipt).filter(Receipt.product_id == product_id).all()
return [{"ingredient": recipe_item.ingredient,
"operation": recipe_item.operation,
"time": recipe_item.wait_time} for recipe_item in receipt_objs]
async def cook_product_order(db: Session, order_id: int):
    order_obj = get_order_obj(db, order_id)
    for order_item in order_obj.order_items:
        receipt = get_receipt_item(db, order_item.product_id)
        if order_item.remaining_qty > 0:
            for _ in range(order_item.remaining_qty):
                # create_from_raw_recipe is a coroutine, so it has to be awaited; calling it
                # without await would return an always-truthy coroutine object and never cook.
                if await create_from_raw_recipe(receipt):
                    order_item.processed_qty += 1
                    db.commit()
    order_obj.state = "ready"
    db.commit()
    return order_obj
async def cook_product_id(db: Session, product_id: int, node=""):
receipt = get_receipt_item(db, product_id)
await create_from_raw_recipe(receipt)
print ("The node {} has finished cooking the product {}".format(node, product_id))
async def create_from_raw_recipe(items):
return await Recipe(items)()
| 29.945055
| 92
| 0.653211
|
c4016171ed10e16cdb2de34f82af00c155c819b9
| 7,020
|
py
|
Python
|
src/pybel_tools/summary/composite_summary.py
|
cthoyt/pybel-tools
|
0bc9f9267178d164dab210f24255793cf4e32ab0
|
[
"MIT"
] | 6
|
2017-03-09T14:28:19.000Z
|
2019-11-21T00:00:15.000Z
|
src/pybel_tools/summary/composite_summary.py
|
pybel/pybel-tools
|
0bc9f9267178d164dab210f24255793cf4e32ab0
|
[
"MIT"
] | 134
|
2016-11-24T11:10:03.000Z
|
2020-09-10T17:03:13.000Z
|
src/pybel_tools/summary/composite_summary.py
|
cthoyt/pybel-tools
|
0bc9f9267178d164dab210f24255793cf4e32ab0
|
[
"MIT"
] | 5
|
2017-03-08T13:28:02.000Z
|
2020-04-05T02:23:17.000Z
|
# -*- coding: utf-8 -*-
"""A BEL Graph summary class."""
from __future__ import annotations
import collections
from dataclasses import dataclass
from typing import Counter, List, Mapping, Set, Tuple
from dataclasses_json import dataclass_json
from pybel import BELGraph, BaseAbundance, BaseEntity
from pybel.constants import IDENTIFIER, NAME
from pybel.struct.graph import WarningTuple
from pybel.struct.summary import (
get_naked_names, get_syntax_errors, get_top_hubs, get_top_pathologies, get_unused_annotations,
get_unused_list_annotation_values, get_unused_namespaces,
)
from .error_summary import (
get_most_common_errors, get_namespaces_with_incorrect_names, get_undefined_annotations, get_undefined_namespaces,
)
from .provenance import count_confidences, get_citation_years
from .stability import (
get_chaotic_pairs, get_contradiction_summary, get_dampened_pairs, get_decrease_mismatch_triplets,
get_increase_mismatch_triplets, get_jens_unstable, get_mutually_unstable_correlation_triples, get_regulatory_pairs,
get_separate_unstable_correlation_triples,
)
from ..typing import SetOfNodePairs, SetOfNodeTriples
from ..utils import prepare_c3, prepare_c3_time_series
__all__ = [
'BELGraphSummary',
]
@dataclass_json
@dataclass
class BELGraphSummary:
"""A container for a summary of a BEL graph."""
# Attribute counters
function_count: Counter[str]
modifications_count: Counter[str]
relation_count: Counter[str]
authors_count: Counter[str]
variants_count: Counter[str]
namespaces_count: Counter[str]
# Errors
undefined_namespaces: Set[str]
undefined_annotations: Set[str]
namespaces_with_incorrect_names: Set[str]
unused_namespaces: Set[str]
unused_annotations: Set[str]
unused_list_annotation_values: Mapping[str, Set[str]]
naked_names: Set[str]
error_count: Counter[str]
error_groups: List[Tuple[str, int]]
syntax_errors: List[WarningTuple]
# Node counters
hub_data: Counter[BaseEntity]
disease_data: Counter[BaseEntity]
# Node pairs
regulatory_pairs: SetOfNodePairs
chaotic_pairs: SetOfNodePairs
dampened_pairs: SetOfNodePairs
contradictory_pairs: SetOfNodePairs
# Node triplets
separate_unstable_correlation_triples: SetOfNodeTriples
mutually_unstable_correlation_triples: SetOfNodeTriples
jens_unstable: SetOfNodeTriples
increase_mismatch_triplets: SetOfNodeTriples
decrease_mismatch_triplets: SetOfNodeTriples
# Bibliometrics
citation_years: List[Tuple[int, int]]
confidence_count: Counter[str]
@staticmethod
def from_graph(graph: BELGraph) -> BELGraphSummary:
"""Create a summary of the graph."""
return BELGraphSummary(
# Attribute counters
function_count=graph.count.functions(),
modifications_count=graph.count.modifications(),
relation_count=graph.count.relations(),
authors_count=graph.count.authors(),
variants_count=graph.count.variants(),
namespaces_count=graph.count.namespaces(),
# Errors
undefined_namespaces=get_undefined_namespaces(graph),
undefined_annotations=get_undefined_annotations(graph),
namespaces_with_incorrect_names=get_namespaces_with_incorrect_names(graph),
unused_namespaces=get_unused_namespaces(graph),
unused_annotations=get_unused_annotations(graph),
unused_list_annotation_values=get_unused_list_annotation_values(graph),
naked_names=get_naked_names(graph),
error_count=graph.count.error_types(),
error_groups=get_most_common_errors(graph),
syntax_errors=get_syntax_errors(graph),
# Node pairs
regulatory_pairs=get_regulatory_pairs(graph),
chaotic_pairs=get_chaotic_pairs(graph),
dampened_pairs=get_dampened_pairs(graph),
contradictory_pairs=get_contradiction_summary(graph),
separate_unstable_correlation_triples=get_separate_unstable_correlation_triples(graph),
mutually_unstable_correlation_triples=get_mutually_unstable_correlation_triples(graph),
jens_unstable=get_jens_unstable(graph),
increase_mismatch_triplets=get_increase_mismatch_triplets(graph),
decrease_mismatch_triplets=get_decrease_mismatch_triplets(graph),
# Bibliometrics
citation_years=get_citation_years(graph),
confidence_count=count_confidences(graph),
# Random
hub_data=_count_top_hubs(graph),
disease_data=_count_top_diseases(graph),
)
def prepare_c3_for_function_count(self):
"""Prepare C3 JSON for function counts."""
return prepare_c3(self.function_count, 'Entity Type')
def prepare_c3_for_relation_count(self):
"""Prepare C3 JSON for relation counts."""
return prepare_c3(self.relation_count, 'Relationship Type')
def prepare_c3_for_error_count(self):
"""Prepare C3 JSON for error counts."""
if self.error_count is not None:
return prepare_c3(self.error_count, 'Error Type')
def prepare_c3_for_transformations(self):
"""Prepare C3 JSON for transformation counts."""
if self.modifications_count is not None:
return prepare_c3(self.modifications_count, 'Edge Variants')
def prepare_c3_for_variants(self):
"""Prepare C3 JSON for variant counts."""
if self.variants_count is not None:
return prepare_c3(self.variants_count, 'Node Variants')
def prepare_c3_for_namespace_count(self):
"""Prepare C3 JSON for namespace counts."""
return prepare_c3(self.namespaces_count, 'Namespaces')
def prepare_c3_for_citation_years(self):
"""Prepare C3 JSON for citation year counts."""
if self.citation_years is not None:
return prepare_c3_time_series(self.citation_years, 'Number of articles')
def prepare_c3_for_hub_data(self):
"""Prepare C3 JSON for hub counts."""
return prepare_c3(self.hub_data, 'Top Hubs')
def prepare_c3_for_pathology_count(self):
"""Prepare C3 JSON for pathology counts."""
if self.disease_data is not None:
return prepare_c3(self.disease_data, 'Pathologies')
def _count_top_hubs(graph, n: int = 15):
return collections.Counter({
(
node.name or node.identifier
if NAME in node or IDENTIFIER in node else
str(node)
): degree
for node, degree in get_top_hubs(graph, n=n)
if isinstance(node, BaseAbundance)
})
def _count_top_diseases(graph, n=15):
return collections.Counter({
(
node.name or node.identifier
if NAME in node or IDENTIFIER in node else
str(node)
): count
for node, count in get_top_pathologies(graph, n=n)
if isinstance(node, BaseAbundance)
})
| 37.540107
| 119
| 0.715385
|
9d92d1204558911fe691422c7080a879ca94679d
| 254
|
py
|
Python
|
feature_engine/wrappers/__init__.py
|
noahjgreen295/feature_engine
|
77248ade812f03bb1b9c0c129320f0c0baad61ed
|
[
"BSD-3-Clause"
] | 650
|
2018-12-31T11:44:19.000Z
|
2022-03-05T03:16:21.000Z
|
feature_engine/wrappers/__init__.py
|
noahjgreen295/feature_engine
|
77248ade812f03bb1b9c0c129320f0c0baad61ed
|
[
"BSD-3-Clause"
] | 205
|
2019-03-21T02:17:53.000Z
|
2021-09-29T08:41:38.000Z
|
feature_engine/wrappers/__init__.py
|
noahjgreen295/feature_engine
|
77248ade812f03bb1b9c0c129320f0c0baad61ed
|
[
"BSD-3-Clause"
] | 193
|
2019-01-09T21:06:46.000Z
|
2021-12-14T09:00:54.000Z
|
"""
The module wrappers includes classes to wrap Scikit-learn transformers so that they
can be applied to a selected subset of features and return a dataframe.
"""
from .wrappers import SklearnTransformerWrapper
__all__ = ["SklearnTransformerWrapper"]
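# A minimal usage sketch (kept as a comment so importing the package stays side-effect free;
# the column names are invented for illustration):
#
#   import pandas as pd
#   from sklearn.preprocessing import StandardScaler
#   from feature_engine.wrappers import SklearnTransformerWrapper
#
#   df = pd.DataFrame({"age": [20, 30, 40], "income": [1.0, 2.0, 3.0], "city": ["a", "b", "c"]})
#   scaler = SklearnTransformerWrapper(transformer=StandardScaler(),
#                                      variables=["age", "income"])
#   scaled = scaler.fit_transform(df)  # "city" passes through untouched; a DataFrame is returned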
| 28.222222
| 83
| 0.80315
|
99d92d3113c7323eb264833be81b5b0600c03774
| 4,312
|
py
|
Python
|
src/windshape/drone/common/DronePose.py
|
Adrien4193/windshape
|
4c73a4a85409f04518029f0ddb8bd7e3c60e4905
|
[
"BSD-2-Clause"
] | null | null | null |
src/windshape/drone/common/DronePose.py
|
Adrien4193/windshape
|
4c73a4a85409f04518029f0ddb8bd7e3c60e4905
|
[
"BSD-2-Clause"
] | null | null | null |
src/windshape/drone/common/DronePose.py
|
Adrien4193/windshape
|
4c73a4a85409f04518029f0ddb8bd7e3c60e4905
|
[
"BSD-2-Clause"
] | null | null | null |
import math
import numpy
# ROS transformations
from tf.transformations import (euler_from_quaternion,
quaternion_from_euler)
# ROS messages for communication
from geometry_msgs.msg import Quaternion, PoseStamped
class DronePose(object):
"""Structure used to represent a 6DOF pose.
Can be converted as list and str.
x, y and z coordinates are in meters.
roll, pitch and yaw are in radians.
When converted as list or array: 0=x, ..., 5=yaw
Inherits from object.
Overrides __init__, __del__, __iter__, __str__
"""
@staticmethod
def fromPoseStamped(poseStamped):
"""Returns a DronePose from a PoseStamped message."""
# Position
x = poseStamped.pose.position.x
y = poseStamped.pose.position.y
z = poseStamped.pose.position.z
# Orientation
q = poseStamped.pose.orientation
roll, pitch, yaw = euler_from_quaternion([q.x, q.y, q.z, q.w])
return DronePose(x, y, z, roll, pitch, yaw)
def __init__(self, x=0, y=0, z=0, roll=0, pitch=0, yaw=0):
"""Initializes x, y, z, roll, pitch, yaw (m, rad).
Args:
x, y, z (float): Drone position [m]
roll, pitch, yaw (float): Drone orientation [rad]
"""
self.__x = x
self.__y = y
self.__z = z
self.__roll = roll
self.__pitch = pitch
self.__yaw = yaw
def __del__(self):
"""Does nothing special."""
pass
def __iter__(self):
"""Used to convert the pose as a list."""
yield self.__x
yield self.__y
yield self.__z
yield self.__roll
yield self.__pitch
yield self.__yaw
def __str__(self):
"""Returns a string representing the pose in mm and deg."""
x, y, z, roll, pitch, yaw = list(self)
return '\n'.join([
'x: {:.2f} mm'.format(1000*x),
'y: {:.2f} mm'.format(1000*y),
'z: {:.2f} mm'.format(1000*z),
'roll: {:.2f} deg'.format(math.degrees(roll)),
'pitch: {:.2f} deg'.format(math.degrees(pitch)),
'yaw: {:.2f} deg'.format(math.degrees(yaw))
])
#
# Private methods to get attributes and make conversions.
#
def getX(self):
"""Returns the X coordinate of the pose in [m]."""
return self.__x
def getY(self):
"""Returns the Y coordinate of the pose in [m]."""
return self.__y
def getZ(self):
"""Returns the Z coordinate of the pose in [m]."""
return self.__z
def getRoll(self):
"""Returns the Roll coordinate of the pose in [rad]."""
return self.__roll
def getPitch(self):
"""Returns the Pitch coordinate of the pose in [rad]."""
return self.__pitch
def getYaw(self):
"""Returns the Yaw coordinate of the pose in [rad]."""
return self.__yaw
def setX(self, value):
"""Changes the X coordinate (float) of the pose in [m]."""
self.__x = float(value)
    def setY(self, value):
        """Changes the Y coordinate (float) of the pose in [m]."""
        self.__y = float(value)
    def setZ(self, value):
        """Changes the Z coordinate (float) of the pose in [m]."""
        self.__z = float(value)
    def setRoll(self, value):
        """Changes the Roll coordinate (float) of the pose in [rad]."""
        self.__roll = float(value)
    def setPitch(self, value):
        """Changes the Pitch coordinate (float) of the pose in [rad]."""
        self.__pitch = float(value)
    def setYaw(self, value):
        """Changes the Yaw coordinate (float) of the pose in [rad]."""
        self.__yaw = float(value)
def toArray(self):
"""Returns a numpy.array representing the pose."""
return numpy.array(list(self))
def toPoseStamped(self):
"""Returns the pose as a PoseStamped message."""
poseStamped = PoseStamped()
x, y, z, roll, pitch, yaw = list(self)
# Position
poseStamped.pose.position.x = x
poseStamped.pose.position.y = y
poseStamped.pose.position.z = z
# Orientation
quaternion = quaternion_from_euler(roll, pitch, yaw)
poseStamped.pose.orientation = Quaternion(*quaternion)
return poseStamped
def toString(self, label, shift=0, indent=4):
"""Returns a string to display the pose.
Structure:
<shift>*"space" label:
<shift>*"space" <indent>*"space" coordinate 1
...
Args:
label (str): The label of the pose.
shift (int): Number of spaces to shift the whole string.
indent=4 (int): Number of spaces to indent pose from label.
"""
pose = indent*' '+str(self).replace('\n', '\n'+indent*' ')
string = '{}:\n{}'.format(label, pose)
return shift*' '+string.replace('\n', '\n'+shift*' ')
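# A minimal usage sketch (not part of the original module): build a pose, inspect it and
# convert it to a ROS PoseStamped message. The values are arbitrary.
def _demo_drone_pose():
    pose = DronePose(x=1.0, y=2.0, z=0.5, roll=0.0, pitch=0.0, yaw=math.radians(90))
    print(pose)            # human-readable string in mm and deg
    print(pose.toArray())  # numpy array [x, y, z, roll, pitch, yaw]
    return pose.toPoseStamped()  # geometry_msgs/PoseStamped with a quaternion orientation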
| 25.364706
| 66
| 0.652134
|
43e6d542f4d966d3c3cbd048fb452ca91d98f0d8
| 2,304
|
py
|
Python
|
ginomypy/plugin.py
|
Saturn-Technologies/gino-stubs
|
8628594e5fbc582d6094d853cf93ce9af06b90cc
|
[
"BSD-3-Clause"
] | 12
|
2019-01-17T23:04:35.000Z
|
2021-06-07T11:04:53.000Z
|
ginomypy/plugin.py
|
Saturn-Technologies/gino-stubs
|
8628594e5fbc582d6094d853cf93ce9af06b90cc
|
[
"BSD-3-Clause"
] | 53
|
2019-11-05T21:09:14.000Z
|
2022-03-24T18:04:11.000Z
|
ginomypy/plugin.py
|
Saturn-Technologies/gino-stubs
|
8628594e5fbc582d6094d853cf93ce9af06b90cc
|
[
"BSD-3-Clause"
] | 7
|
2019-01-23T09:48:54.000Z
|
2022-02-25T06:10:30.000Z
|
from typing import Callable, List, Optional, Tuple, TypeVar
from mypy.nodes import MypyFile, TypeInfo
from mypy.plugin import (
ClassDefContext,
DynamicClassDefContext,
FunctionContext,
MethodContext,
Plugin,
)
from mypy.types import Type
from sqlmypy import column_hook, grouping_hook
from .hooks import (
crud_model_values_hook,
declarative_base_hook,
model_base_class_hook,
model_init_hook,
)
from .names import COLUMN_NAME, DECLARATIVE_BASE_NAME, GROUPING_NAME, VALUES_NAMES
from .utils import get_fullname, is_declarative
T = TypeVar('T')
U = TypeVar('U')
CB = Optional[Callable[[T], None]]
CBT = Optional[Callable[[T], U]]
class GinoPlugin(Plugin):
def __is_declarative(self, fullname: str) -> bool:
info = self.lookup_fully_qualified(fullname)
if info and isinstance(info.node, TypeInfo):
# May be a model instantiation
return is_declarative(info.node)
return False
def get_function_hook(self, fullname: str) -> CBT[FunctionContext, Type]:
if fullname == COLUMN_NAME:
return column_hook # type: ignore
if fullname == GROUPING_NAME:
return grouping_hook # type: ignore
if self.__is_declarative(fullname):
return model_init_hook
return None
def get_method_hook(self, fullname: str) -> CBT[MethodContext, Type]:
if fullname == COLUMN_NAME:
return column_hook # type: ignore
if fullname == GROUPING_NAME:
return grouping_hook # type: ignore
if fullname in VALUES_NAMES:
return crud_model_values_hook
if self.__is_declarative(fullname):
return model_init_hook
return None
def get_dynamic_class_hook(self, fullname: str) -> CB[DynamicClassDefContext]:
if fullname == DECLARATIVE_BASE_NAME:
return declarative_base_hook
return None
def get_base_class_hook(self, fullname: str) -> CB[ClassDefContext]:
if self.__is_declarative(fullname):
return model_base_class_hook
return None
def get_additional_deps(self, file: MypyFile) -> List[Tuple[int, str, int]]:
if get_fullname(file) == 'gino.api':
return [(10, 'gino.crud', -1)]
return []
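# Hedged addition (the real entry point may live in ginomypy/__init__.py instead): mypy
# locates a plugin through a module-level ``plugin`` callable named in its configuration
# and expects the plugin class back.
def plugin(version: str) -> type:
    """Return the plugin class for mypy's plugin machinery."""
    return GinoPlugin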
| 28.8
| 82
| 0.669705
|
b678517f98c2c4e32eafcfe1dfba3e921c60a79e
| 3,536
|
py
|
Python
|
MTCNN_face_detection/scripts/caffemodel_to_pytorchmodel.py
|
luo1007188754/luozhiwen138
|
26a5db8b8dc1431591265c8352a6c1c99e171fbc
|
[
"MIT"
] | 30
|
2020-11-03T10:54:37.000Z
|
2021-12-06T07:59:05.000Z
|
MTCNN_face_detection/scripts/caffemodel_to_pytorchmodel.py
|
luo1007188754/luozhiwen138
|
26a5db8b8dc1431591265c8352a6c1c99e171fbc
|
[
"MIT"
] | 1
|
2020-11-17T08:21:39.000Z
|
2020-11-17T08:21:39.000Z
|
MTCNN_face_detection/scripts/caffemodel_to_pytorchmodel.py
|
luo1007188754/luozhiwen138
|
26a5db8b8dc1431591265c8352a6c1c99e171fbc
|
[
"MIT"
] | 11
|
2020-11-03T10:54:38.000Z
|
2021-12-10T10:40:57.000Z
|
import caffe
import numpy as np
import torch
"""
# PNet
# conv1.weight (10, 3, 3, 3)
# conv1.bias (10,)
# prelu1.weight (10,)
# conv2.weight (16, 10, 3, 3)
# conv2.bias (16,)
# prelu2.weight (16,)
# conv3.weight (32, 16, 3, 3)
# conv3.bias (32,)
# prelu3.weight (32,)
# conv4-1.weight (2, 32, 1, 1)
# conv4-1.bias (2,)
# conv4-2.weight (4, 32, 1, 1)
# conv4-2.bias (4,)
# RNet
# conv1.weight (28, 3, 3, 3)
# conv1.bias (28,)
# prelu1.weight (28,)
# conv2.weight (48, 28, 3, 3)
# conv2.bias (48,)
# prelu2.weight (48,)
# conv3.weight (64, 48, 2, 2)
# conv3.bias (64,)
# prelu3.weight (64,)
# conv4.weight (128, 576)
# conv4.bias (128,)
# prelu4.weight (128,)
# conv5-1.weight (2, 128)
# conv5-1.bias (2,)
# conv5-2.weight (4, 128)
# conv5-2.bias (4,)
# ONet
# conv1.weight (32, 3, 3, 3)
# conv1.bias (32,)
# prelu1.weight (32,)
# conv2.weight (64, 32, 3, 3)
# conv2.bias (64,)
# prelu2.weight (64,)
# conv3.weight (64, 64, 3, 3)
# conv3.bias (64,)
# prelu3.weight (64,)
# conv4.weight (128, 64, 2, 2)
# conv4.bias (128,)
# prelu4.weight (128,)
# conv5.weight (256, 1152)
# conv5.bias (256,)
# prelu5.weight (256,)
# conv6-1.weight (2, 256)
# conv6-1.bias (2,)
# conv6-2.weight (4, 256)
# conv6-2.bias (4,)
# conv6-3.weight (10, 256)
# conv6-3.bias (10,)
"""
def dump_layer(net):
for param in net.params.keys():
print(param.lower() + '.weight', net.params[param][0].data.shape)
if len(net.params[param]) == 2:
print(param.lower() + '.bias', net.params[param][1].data.shape)
def convert_to_pytorch_model(net, **net_info):
model_state = {}
for param in net.params.keys():
if net_info['cls_prob'] in param:
prefix = 'cls_prob.' + param.lower().replace('-', '_')
elif net_info['bbox_offset'] in param:
prefix = 'bbox_offset.' + param.lower().replace('-', '_')
elif net_info['landmarks'] is not None and net_info['landmarks'] in param:
prefix = 'landmarks.' + param.lower().replace('-', '_')
else:
prefix = 'backend.' + param.lower()
if 'prelu' in prefix:
model_state[prefix + '.weight'] = torch.tensor(net.params[param][0].data)
else:
if len(net.params[param][0].data.shape) == 4:
model_state[prefix + '.weight'] = torch.tensor(net.params[param][0].data.transpose((0, 1, 3, 2)))
else:
model_state[prefix + '.weight'] = torch.tensor(net.params[param][0].data)
model_state[prefix + '.bias'] = torch.tensor(net.params[param][1].data)
return model_state
def covnver_pnet():
net = caffe.Net('../caffe_models/det1.prototxt', '../caffe_models/det1.caffemodel', caffe.TEST)
# dump_layer(net)
p = convert_to_pytorch_model(net, cls_prob='conv4-1', bbox_offset='conv4-2', landmarks=None)
np.save('pnet.npy', p, allow_pickle=True)
def covnver_rnet():
net = caffe.Net('../caffe_models/det2.prototxt', '../caffe_models/det2.caffemodel', caffe.TEST)
# dump_layer(net)
p = convert_to_pytorch_model(net, cls_prob='conv5-1', bbox_offset='conv5-2', landmarks=None)
np.save('rnet.npy', p, allow_pickle=True)
def covnver_onet():
net = caffe.Net('../caffe_models/det3.prototxt', '../caffe_models/det3.caffemodel', caffe.TEST)
# dump_layer(net)
p = convert_to_pytorch_model(net, cls_prob='conv6-1', bbox_offset='conv6-2', landmarks='conv6-3')
np.save('onet.npy', p, allow_pickle=True)
if __name__ == "__main__":
covnver_pnet()
covnver_rnet()
covnver_onet()
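# Usage sketch for the saved files (not part of the original script; ``PNet`` is a
# placeholder for whatever torch.nn.Module mirrors the layer names listed above):
#
#   import numpy as np
#   state = np.load('pnet.npy', allow_pickle=True).item()  # dict of parameter name -> tensor
#   model = PNet()
#   model.load_state_dict(state)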
| 30.222222
| 113
| 0.610294
|
cc83ebc1713ff657f815bff8e09657fc813f684c
| 1,767
|
py
|
Python
|
conanfile.py
|
ngrodzitski/ci-training-corehard2019
|
1902bd2bcb9560d3d547d2b09415cc28327e2ca6
|
[
"MIT"
] | null | null | null |
conanfile.py
|
ngrodzitski/ci-training-corehard2019
|
1902bd2bcb9560d3d547d2b09415cc28327e2ca6
|
[
"MIT"
] | null | null | null |
conanfile.py
|
ngrodzitski/ci-training-corehard2019
|
1902bd2bcb9560d3d547d2b09415cc28327e2ca6
|
[
"MIT"
] | null | null | null |
from conans import ConanFile, CMake, tools
class HelloConan(ConanFile):
name = "hello"
version = "0.1.0"
license = "<Put the package license here>"
author = "<Put your name here> <And your email here>"
url = "<Package recipe repository url here, for issues about the package>"
description = "<Description of Hello here>"
topics = ("<Put some tag here>", "<here>", "<and here>")
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = {"shared": False}
generators = "cmake"
def source(self):
self.run("git clone https://github.com/conan-io/hello.git")
# This small hack might be useful to guarantee proper /MT /MD linkage
# in MSVC if the packaged project doesn't have variables to set it
# properly
tools.replace_in_file("hello/CMakeLists.txt", "PROJECT(HelloWorld)",
'''PROJECT(HelloWorld)
include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake)
conan_basic_setup()''')
def build(self):
cmake = CMake(self)
cmake.configure(source_folder="hello")
cmake.build()
# Explicit way:
# self.run('cmake %s/hello %s'
# % (self.source_folder, cmake.command_line))
# self.run("cmake --build . %s" % cmake.build_config)
def package(self):
self.copy("*.h", dst="include", src="hello")
self.copy("*hello.lib", dst="lib", keep_path=False)
self.copy("*.dll", dst="bin", keep_path=False)
self.copy("*.so", dst="lib", keep_path=False)
self.copy("*.dylib", dst="lib", keep_path=False)
self.copy("*.a", dst="lib", keep_path=False)
def package_info(self):
self.cpp_info.libs = ["hello"]
| 36.8125
| 78
| 0.606112
|
dcd4ed6c8468a1ee8470030f452e3f71d975e277
| 8,200
|
py
|
Python
|
automl/google/cloud/automl_v1beta1/proto/table_spec_pb2.py
|
ace-n/google-cloud-python
|
c97bef45a0751ef5846e4abceb47967a20cb245c
|
[
"Apache-2.0"
] | 1
|
2019-06-14T10:11:59.000Z
|
2019-06-14T10:11:59.000Z
|
automl/google/cloud/automl_v1beta1/proto/table_spec_pb2.py
|
ace-n/google-cloud-python
|
c97bef45a0751ef5846e4abceb47967a20cb245c
|
[
"Apache-2.0"
] | 1
|
2018-04-06T19:51:23.000Z
|
2018-04-06T19:51:23.000Z
|
automl/google/cloud/automl_v1beta1/proto/table_spec_pb2.py
|
ace-n/google-cloud-python
|
c97bef45a0751ef5846e4abceb47967a20cb245c
|
[
"Apache-2.0"
] | 1
|
2020-04-14T10:47:41.000Z
|
2020-04-14T10:47:41.000Z
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/automl_v1beta1/proto/table_spec.proto
import sys
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.cloud.automl_v1beta1.proto import (
io_pb2 as google_dot_cloud_dot_automl__v1beta1_dot_proto_dot_io__pb2,
)
DESCRIPTOR = _descriptor.FileDescriptor(
name="google/cloud/automl_v1beta1/proto/table_spec.proto",
package="google.cloud.automl.v1beta1",
syntax="proto3",
serialized_options=_b(
"\n\037com.google.cloud.automl.v1beta1P\001ZAgoogle.golang.org/genproto/googleapis/cloud/automl/v1beta1;automl\312\002\033Google\\Cloud\\AutoMl\\V1beta1"
),
serialized_pb=_b(
'\n2google/cloud/automl_v1beta1/proto/table_spec.proto\x12\x1bgoogle.cloud.automl.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a*google/cloud/automl_v1beta1/proto/io.proto"\xae\x01\n\tTableSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1b\n\x13time_column_spec_id\x18\x02 \x01(\t\x12\x11\n\trow_count\x18\x03 \x01(\x03\x12\x14\n\x0c\x63olumn_count\x18\x07 \x01(\x03\x12?\n\rinput_configs\x18\x05 \x03(\x0b\x32(.google.cloud.automl.v1beta1.InputConfig\x12\x0c\n\x04\x65tag\x18\x06 \x01(\tB\x84\x01\n\x1f\x63om.google.cloud.automl.v1beta1P\x01ZAgoogle.golang.org/genproto/googleapis/cloud/automl/v1beta1;automl\xca\x02\x1bGoogle\\Cloud\\AutoMl\\V1beta1b\x06proto3'
),
dependencies=[
google_dot_api_dot_annotations__pb2.DESCRIPTOR,
google_dot_cloud_dot_automl__v1beta1_dot_proto_dot_io__pb2.DESCRIPTOR,
],
)
_TABLESPEC = _descriptor.Descriptor(
name="TableSpec",
full_name="google.cloud.automl.v1beta1.TableSpec",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="name",
full_name="google.cloud.automl.v1beta1.TableSpec.name",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="time_column_spec_id",
full_name="google.cloud.automl.v1beta1.TableSpec.time_column_spec_id",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="row_count",
full_name="google.cloud.automl.v1beta1.TableSpec.row_count",
index=2,
number=3,
type=3,
cpp_type=2,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="column_count",
full_name="google.cloud.automl.v1beta1.TableSpec.column_count",
index=3,
number=7,
type=3,
cpp_type=2,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="input_configs",
full_name="google.cloud.automl.v1beta1.TableSpec.input_configs",
index=4,
number=5,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="etag",
full_name="google.cloud.automl.v1beta1.TableSpec.etag",
index=5,
number=6,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=158,
serialized_end=332,
)
_TABLESPEC.fields_by_name[
"input_configs"
].message_type = google_dot_cloud_dot_automl__v1beta1_dot_proto_dot_io__pb2._INPUTCONFIG
DESCRIPTOR.message_types_by_name["TableSpec"] = _TABLESPEC
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TableSpec = _reflection.GeneratedProtocolMessageType(
"TableSpec",
(_message.Message,),
dict(
DESCRIPTOR=_TABLESPEC,
__module__="google.cloud.automl_v1beta1.proto.table_spec_pb2",
__doc__="""A specification of a relational table. The table's schema is represented
via its child column specs. It is pre-populated as part of ImportData by
schema inference algorithm, the version of which is a required parameter
of ImportData InputConfig. Note: While working with a table, at times
the schema may be inconsistent with the data in the table (e.g. string
in a FLOAT64 column). The consistency validation is done upon creation
of a model. Used by: \* Tables
Attributes:
name:
Output only. The resource name of the table spec. Form: ``pro
jects/{project_id}/locations/{location_id}/datasets/{dataset_i
d}/tableSpecs/{table_spec_id}``
time_column_spec_id:
column\_spec\_id of the time column. Only used if the parent
dataset's ml\_use\_column\_spec\_id is not set. Used to split
rows into TRAIN, VALIDATE and TEST sets such that oldest rows
go to TRAIN set, newest to TEST, and those in between to
VALIDATE. Required type: TIMESTAMP. If both this column and
ml\_use\_column are not set, then ML use of all rows will be
assigned by AutoML. NOTE: Updates of this field will instantly
affect any other users concurrently working with the dataset.
row_count:
Output only. The number of rows (i.e. examples) in the table.
column_count:
Output only. The number of columns of the table. That is, the
number of child ColumnSpec-s.
input_configs:
Output only. Input configs via which data currently residing
in the table had been imported.
etag:
Used to perform consistent read-modify-write updates. If not
set, a blind "overwrite" update happens.
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1beta1.TableSpec)
),
)
_sym_db.RegisterMessage(TableSpec)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 36.7713
| 668
| 0.642805
|
735456fef2835de56a4b56f05cd8b7cc7be56ca0
| 2,539
|
py
|
Python
|
packages/w3af/w3af/plugins/attack/db/tests/test_sqlmap_update.py
|
ZooAtmosphereGroup/HelloPackages
|
0ccffd33bf927b13d28c8f715ed35004c33465d9
|
[
"Apache-2.0"
] | 3
|
2019-04-09T22:59:33.000Z
|
2019-06-14T09:23:24.000Z
|
tools/w3af/w3af/plugins/attack/db/tests/test_sqlmap_update.py
|
sravani-m/Web-Application-Security-Framework
|
d9f71538f5cba6fe1d8eabcb26c557565472f6a6
|
[
"MIT"
] | null | null | null |
tools/w3af/w3af/plugins/attack/db/tests/test_sqlmap_update.py
|
sravani-m/Web-Application-Security-Framework
|
d9f71538f5cba6fe1d8eabcb26c557565472f6a6
|
[
"MIT"
] | null | null | null |
"""
test_sqlmap_update.py
Copyright 2012 Andres Riancho
This file is part of w3af, http://w3af.org/ .
w3af is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation version 2 of the License.
w3af is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with w3af; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
import unittest
from w3af.core.data.misc.file_utils import get_days_since_last_update
from w3af.plugins.attack.db.sqlmap_wrapper import SQLMapWrapper
class TestSQLMapUpdate(unittest.TestCase):
"""Verify that we have an updated version of sqlmap within w3af"""
def test_updated(self):
days = get_days_since_last_update(SQLMapWrapper.SQLMAP_LOCATION)
# See http://nuclearsquid.com/writings/subtree-merging-and-you/
# https://www.kernel.org/pub/software/scm/git/docs/howto/using-merge-subtree.html
#
# This requires git >= 1.8
# sudo add-apt-repository ppa:git-core/ppa
# sudo apt-get update
# sudo apt-get install git
#
setup_commands = ('git remote add -f'
' sqlmap git://github.com/sqlmapproject/sqlmap.git',
'git subtree add'
' --prefix=w3af/plugins/attack/db/sqlmap/'
' --squash sqlmap master')
setup_str = ''.join([' %s\n' % scmd for scmd in setup_commands])
maintain_commands = ('git subtree pull'
' --prefix=w3af/plugins/attack/db/sqlmap'
' --squash sqlmap master',
'git push')
maintain_str = ''.join([' %s\n' % mcmd for mcmd in maintain_commands])
msg = ('\nYou need to update the sqlmap installation that\'s embedded'
' with w3af. If you run "git remote" and sqlmap appears in the'
' output just run:\n'
'%s\n'
'Worst case scenario you will have to set up the remote:\n'
'%s')
msg %= (maintain_str, setup_str)
self.assertLess(days, 30, msg)
| 38.469697 | 93 | 0.618354 |
b5cff3ae02b90d8f2b098d7a53a506320f1eda15 | 291 | py | Python | mylibrary/setup.py | quantshah/scikit-project | fd23c0fdb6d501e5d48f8655d1fabb4735efb021 | ["BSD-3-Clause"] | 48 | 2019-09-11T12:36:54.000Z | 2021-05-28T18:35:09.000Z | mylibrary/setup.py | quantshah/scikit-project | fd23c0fdb6d501e5d48f8655d1fabb4735efb021 | ["BSD-3-Clause"] | 24 | 2019-11-10T11:55:48.000Z | 2021-04-23T16:54:50.000Z | mylibrary/setup.py | quantshah/scikit-project | fd23c0fdb6d501e5d48f8655d1fabb4735efb021 | ["BSD-3-Clause"] | 15 | 2019-12-02T15:22:42.000Z | 2021-04-30T18:55:48.000Z |
#!/usr/bin/env python
"""
Setup file
"""
from setuptools import setup, find_packages
setup(name='mylibrary',
version='0.0.1',
description='Test library',
author='Shahnawaz Ahmed, Nathan Shammah',
packages = find_packages(include=['mylibrary', 'mylibrary.*'])
)
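# Typical usage of this setup file (assumed, not part of the original source):
#   pip install -e .                      # editable install while developing
#   python setup.py sdist bdist_wheel     # build distributable archives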
| 22.384615 | 68 | 0.656357 |
7dbec8eee77359792001e78aef3d8fb375aadb08 | 983 | py | Python | social_media_crawler.py | joaopfonseca/social_media_crawler | 3abce6c850d203805f705e82612d98abf2eb4a47 | ["MIT"] | 1 | 2021-11-14T18:45:01.000Z | 2021-11-14T18:45:01.000Z | social_media_crawler.py | joaopfonseca/social_media_crawler | 3abce6c850d203805f705e82612d98abf2eb4a47 | ["MIT"] | 2 | 2021-06-08T19:27:42.000Z | 2021-06-08T19:32:43.000Z | social_media_crawler.py | joaopfonseca/social_media_crawler | 3abce6c850d203805f705e82612d98abf2eb4a47 | ["MIT"] | null | null | null |
import os
import subprocess
import sys
import time
project_dir = os.path.realpath('')
gui = '/gui/FlaskApp/'
os.chdir(project_dir+gui)
f = subprocess.Popen(['python','db_facebook.py'])
i = subprocess.Popen(['python','db_instagram.py'])
t = subprocess.Popen(['python','db_twitter.py'])
gui = subprocess.Popen(['python','__init__.py'])
time.sleep(3)
os.system('open http://0.0.0.0:5000/')
kw_file = open('support/active_keyword','r')
active_keyword = kw_file.readline()
keyword = active_keyword.rstrip()
kw_file.close()
# Poll the active keyword file; when the keyword changes, restart the crawler subprocesses.
while True:
kw_file2 = open('support/active_keyword','r')
active_keyword2 = kw_file2.readline()
keyword2 = active_keyword2.rstrip()
kw_file2.close()
if keyword != keyword2:
f.terminate()
i.terminate()
t.terminate()
keyword=keyword2
f = subprocess.Popen(['python','db_facebook.py'])
i = subprocess.Popen(['python','db_instagram.py'])
t = subprocess.Popen(['python','db_twitter.py'])
| 25.868421 | 58 | 0.671414 |
03b89c11e307ebd94907ee7fff52e2124f2f1c60 | 4,056 | py | Python | docker/errors.py | peter-slovak/docker-py | 3076a9ac40b91458f7e95e3c6167e1bbb92682b1 | ["Apache-2.0"] | 1 | 2021-07-14T15:01:30.000Z | 2021-07-14T15:01:30.000Z | docker/errors.py | peter-slovak/docker-py | 3076a9ac40b91458f7e95e3c6167e1bbb92682b1 | ["Apache-2.0"] | null | null | null | docker/errors.py | peter-slovak/docker-py | 3076a9ac40b91458f7e95e3c6167e1bbb92682b1 | ["Apache-2.0"] | null | null | null |
import requests
class DockerException(Exception):
"""
A base class from which all other exceptions inherit.
If you want to catch all errors that the Docker SDK might raise,
catch this base exception.
"""
def create_api_error_from_http_exception(e):
"""
Create a suitable APIError from requests.exceptions.HTTPError.
"""
response = e.response
try:
explanation = response.json()['message']
except ValueError:
explanation = response.content.strip()
cls = APIError
if response.status_code == 404:
if explanation and ('No such image' in str(explanation) or
'not found: does not exist or no pull access'
in str(explanation)):
cls = ImageNotFound
else:
cls = NotFound
raise cls(e, response=response, explanation=explanation)
class APIError(requests.exceptions.HTTPError, DockerException):
"""
An HTTP error from the API.
"""
def __init__(self, message, response=None, explanation=None):
# requests 1.2 supports response as a keyword argument, but
# requests 1.1 doesn't
super(APIError, self).__init__(message)
self.response = response
self.explanation = explanation
def __str__(self):
message = super(APIError, self).__str__()
if self.is_client_error():
message = '{0} Client Error: {1}'.format(
self.response.status_code, self.response.reason)
elif self.is_server_error():
message = '{0} Server Error: {1}'.format(
self.response.status_code, self.response.reason)
if self.explanation:
message = '{0} ("{1}")'.format(message, self.explanation)
return message
@property
def status_code(self):
if self.response is not None:
return self.response.status_code
def is_client_error(self):
if self.status_code is None:
return False
return 400 <= self.status_code < 500
def is_server_error(self):
if self.status_code is None:
return False
return 500 <= self.status_code < 600
class NotFound(APIError):
pass
class ImageNotFound(NotFound):
pass
class InvalidVersion(DockerException):
pass
class InvalidRepository(DockerException):
pass
class InvalidConfigFile(DockerException):
pass
class InvalidArgument(DockerException):
pass
class DeprecatedMethod(DockerException):
pass
class TLSParameterError(DockerException):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg + (". TLS configurations should map the Docker CLI "
"client configurations. See "
"https://docs.docker.com/engine/articles/https/ "
"for API details.")
class NullResource(DockerException, ValueError):
pass
class ContainerError(DockerException):
"""
Represents a container that has exited with a non-zero exit code.
"""
def __init__(self, container, exit_status, command, image, stderr):
self.container = container
self.exit_status = exit_status
self.command = command
self.image = image
self.stderr = stderr
msg = ("Command '{}' in image '{}' returned non-zero exit status {}: "
"{}").format(command, image, exit_status, stderr)
super(ContainerError, self).__init__(msg)
class StreamParseError(RuntimeError):
def __init__(self, reason):
self.msg = reason
class BuildError(Exception):
pass
def create_unexpected_kwargs_error(name, kwargs):
quoted_kwargs = ["'{}'".format(k) for k in sorted(kwargs)]
text = ["{}() ".format(name)]
if len(quoted_kwargs) == 1:
text.append("got an unexpected keyword argument ")
else:
text.append("got unexpected keyword arguments ")
text.append(', '.join(quoted_kwargs))
return TypeError(''.join(text))
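# --- illustrative usage (appended sketch, not part of the library) ---
# Shows how a raw requests HTTPError can be turned into the richer errors above.
# Assumes a Docker daemon is listening on localhost:2375; otherwise requests
# raises a ConnectionError before any APIError is produced.
if __name__ == "__main__":
    try:
        resp = requests.get("http://localhost:2375/images/does-not-exist/json")
        resp.raise_for_status()
    except requests.exceptions.HTTPError as http_err:
        try:
            create_api_error_from_http_exception(http_err)
        except APIError as api_err:
            print(api_err.status_code, api_err.is_client_error(), str(api_err))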
| 26.684211 | 78 | 0.628945 |
d64b0f00b681d410a8fde915966b5b95a71c9a79 | 11,231 | py | Python | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2020_12_01/aio/operations/_virtual_machine_extension_images_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | ["MIT"] | 2,728 | 2015-01-09T10:19:32.000Z | 2022-03-31T14:50:33.000Z | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2020_12_01/aio/operations/_virtual_machine_extension_images_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | ["MIT"] | 17,773 | 2015-01-05T15:57:17.000Z | 2022-03-31T23:50:25.000Z | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2020_12_01/aio/operations/_virtual_machine_extension_images_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | ["MIT"] | 1,916 | 2015-01-19T05:05:41.000Z | 2022-03-31T19:36:44.000Z |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VirtualMachineExtensionImagesOperations:
"""VirtualMachineExtensionImagesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2020_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def get(
self,
location: str,
publisher_name: str,
type: str,
version: str,
**kwargs: Any
) -> "_models.VirtualMachineExtensionImage":
"""Gets a virtual machine extension image.
:param location: The name of a supported Azure region.
:type location: str
:param publisher_name:
:type publisher_name: str
:param type:
:type type: str
:param version:
:type version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualMachineExtensionImage, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2020_12_01.models.VirtualMachineExtensionImage
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineExtensionImage"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'publisherName': self._serialize.url("publisher_name", publisher_name, 'str'),
'type': self._serialize.url("type", type, 'str'),
'version': self._serialize.url("version", version, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualMachineExtensionImage', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmextension/types/{type}/versions/{version}'} # type: ignore
async def list_types(
self,
location: str,
publisher_name: str,
**kwargs: Any
) -> List["_models.VirtualMachineExtensionImage"]:
"""Gets a list of virtual machine extension image types.
:param location: The name of a supported Azure region.
:type location: str
:param publisher_name:
:type publisher_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of VirtualMachineExtensionImage, or the result of cls(response)
:rtype: list[~azure.mgmt.compute.v2020_12_01.models.VirtualMachineExtensionImage]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.VirtualMachineExtensionImage"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = self.list_types.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'publisherName': self._serialize.url("publisher_name", publisher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('[VirtualMachineExtensionImage]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_types.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmextension/types'} # type: ignore
async def list_versions(
self,
location: str,
publisher_name: str,
type: str,
filter: Optional[str] = None,
top: Optional[int] = None,
orderby: Optional[str] = None,
**kwargs: Any
) -> List["_models.VirtualMachineExtensionImage"]:
"""Gets a list of virtual machine extension image versions.
:param location: The name of a supported Azure region.
:type location: str
:param publisher_name:
:type publisher_name: str
:param type:
:type type: str
:param filter: The filter to apply on the operation.
:type filter: str
:param top:
:type top: int
:param orderby:
:type orderby: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of VirtualMachineExtensionImage, or the result of cls(response)
:rtype: list[~azure.mgmt.compute.v2020_12_01.models.VirtualMachineExtensionImage]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.VirtualMachineExtensionImage"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = self.list_versions.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'publisherName': self._serialize.url("publisher_name", publisher_name, 'str'),
'type': self._serialize.url("type", type, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query("top", top, 'int')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('[VirtualMachineExtensionImage]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_versions.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmextension/types/{type}/versions'} # type: ignore
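# --- illustrative usage (appended sketch, not part of the generated code) ---
# Assumes the async ComputeManagementClient from this same package version and
# azure-identity for credentials; the subscription id, region and publisher
# values are placeholders.
if __name__ == "__main__":
    import asyncio
    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.compute.v2020_12_01.aio import ComputeManagementClient

    async def _list_extension_types():
        async with DefaultAzureCredential() as credential:
            async with ComputeManagementClient(credential, "<subscription-id>") as client:
                images = await client.virtual_machine_extension_images.list_types(
                    location="westus2", publisher_name="Microsoft.Azure.Extensions")
                for image in images:
                    print(image.name)

    asyncio.run(_list_extension_types())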
| 46.218107 | 211 | 0.666014 |
589a4957a8373af9f6d2cb67b09f8d340cbf2750 | 8,725 | py | Python | hummingbot/client/command/start_command.py | dam-bots/hummingbot | fd15d95a854fbe41735f5b2bab38863867d865e1 | ["Apache-2.0"] | null | null | null | hummingbot/client/command/start_command.py | dam-bots/hummingbot | fd15d95a854fbe41735f5b2bab38863867d865e1 | ["Apache-2.0"] | null | null | null | hummingbot/client/command/start_command.py | dam-bots/hummingbot | fd15d95a854fbe41735f5b2bab38863867d865e1 | ["Apache-2.0"] | null | null | null |
#!/usr/bin/env python
import asyncio
import platform
import threading
import time
from typing import (
Optional,
Callable,
)
from os.path import dirname, join
from hummingbot.core.clock import (
Clock,
ClockMode
)
from hummingbot import init_logging
from hummingbot.client.config.config_helpers import (
get_strategy_starter_file,
)
import hummingbot.client.settings as settings
from hummingbot.core.utils.async_utils import safe_ensure_future
from hummingbot.core.utils.kill_switch import KillSwitch
from typing import TYPE_CHECKING
from hummingbot.client.config.global_config_map import global_config_map
from hummingbot.script.script_iterator import ScriptIterator
from hummingbot.connector.connector_status import get_connector_status, warning_messages
from hummingbot.client.config.config_var import ConfigVar
from hummingbot.client.command.rate_command import RateCommand
from hummingbot.client.config.config_validators import validate_bool
from hummingbot.core.rate_oracle.rate_oracle import RateOracle
from hummingbot.exceptions import OracleRateUnavailable
if TYPE_CHECKING:
from hummingbot.client.hummingbot_application import HummingbotApplication
class StartCommand:
async def _run_clock(self):
with self.clock as clock:
await clock.run()
async def wait_till_ready(self, # type: HummingbotApplication
func: Callable, *args, **kwargs):
while True:
all_ready = all([market.ready for market in self.markets.values()])
if not all_ready:
await asyncio.sleep(0.5)
else:
return func(*args, **kwargs)
def start(self, # type: HummingbotApplication
log_level: Optional[str] = None,
restore: Optional[bool] = False):
if threading.current_thread() != threading.main_thread():
self.ev_loop.call_soon_threadsafe(self.start, log_level, restore)
return
safe_ensure_future(self.start_check(log_level, restore), loop=self.ev_loop)
async def start_check(self, # type: HummingbotApplication
log_level: Optional[str] = None,
restore: Optional[bool] = False):
if self.strategy_task is not None and not self.strategy_task.done():
self._notify('The bot is already running - please run "stop" first')
return
if settings.required_rate_oracle:
if not (await self.confirm_oracle_conversion_rate()):
self._notify("The strategy failed to start.")
return
else:
RateOracle.get_instance().start()
is_valid = await self.status_check_all(notify_success=False)
if not is_valid:
self._notify("Status checks failed. Start aborted.")
return
if self._last_started_strategy_file != self.strategy_file_name:
init_logging("hummingbot_logs.yml",
override_log_level=log_level.upper() if log_level else None,
strategy_file_path=self.strategy_file_name)
self._last_started_strategy_file = self.strategy_file_name
# If macOS, disable App Nap.
if platform.system() == "Darwin":
import appnope
appnope.nope()
self._initialize_notifiers()
# self._notify(f"\nStatus check complete. Starting '{self.strategy_name}' strategy...")
if any([str(exchange).endswith("paper_trade") for exchange in settings.required_exchanges]):
self._notify("\nPaper Trading Active: All orders are simulated, and no real orders are placed.")
for exchange in settings.required_exchanges:
connector = str(exchange)
status = get_connector_status(connector)
# Display custom warning message for specific connectors
warning_msg = warning_messages.get(connector, None)
if warning_msg is not None:
# self._notify(f"\nConnector status: {status}\n"
# f"{warning_msg}")
pass
# Display warning message if the exchange connector has outstanding issues or not working
elif status != "GREEN":
# self._notify(f"\nConnector status: {status}. This connector has one or more issues.\n"
# "Refer to our Github page for more info: https://github.com/coinalpha/hummingbot")
pass
await self.start_market_making(self.strategy_name, restore)
async def start_market_making(self, # type: HummingbotApplication
strategy_name: str,
restore: Optional[bool] = False):
start_strategy: Callable = get_strategy_starter_file(strategy_name)
if strategy_name in settings.STRATEGIES:
start_strategy(self)
else:
raise NotImplementedError
try:
config_path: str = self.strategy_file_name
self.start_time = time.time() * 1e3 # Time in milliseconds
self.clock = Clock(ClockMode.REALTIME)
for market in self.markets.values():
if market is not None:
self.clock.add_iterator(market)
self.markets_recorder.restore_market_states(config_path, market)
if len(market.limit_orders) > 0:
if restore is False:
self._notify(f"Cancelling dangling limit orders on {market.name}...")
await market.cancel_all(5.0)
else:
self._notify(f"Restored {len(market.limit_orders)} limit orders on {market.name}...")
if self.strategy:
self.clock.add_iterator(self.strategy)
if global_config_map["script_enabled"].value:
script_file = global_config_map["script_file_path"].value
folder = dirname(script_file)
if folder == "":
script_file = join(settings.SCRIPTS_PATH, script_file)
if self.strategy_name != "pure_market_making":
self._notify("Error: script feature is only available for pure_market_making strategy (for now).")
else:
self._script_iterator = ScriptIterator(script_file, list(self.markets.values()),
self.strategy, 0.1)
self.clock.add_iterator(self._script_iterator)
self._notify(f"Script ({script_file}) started.")
self.strategy_task: asyncio.Task = safe_ensure_future(self._run_clock(), loop=self.ev_loop)
self._notify(f"\n'{strategy_name}' strategy started.\n"
f"Run `status` command to query the progress.")
self.logger().info("start command initiated.")
if self.strategy_name == "uniswap_v3_lp": # this would be removed in subsequent iterations
self._notify("Warning: Ensure that the trading pair is in the right order .i.e. {BASE}-{QUOTE}.")
if self._trading_required:
self.kill_switch = KillSwitch(self)
await self.wait_till_ready(self.kill_switch.start)
except Exception as e:
self.logger().error(str(e), exc_info=True)
async def confirm_oracle_conversion_rate(self, # type: HummingbotApplication
) -> bool:
try:
result = False
self.app.clear_input()
self.placeholder_mode = True
self.app.hide_input = True
for pair in settings.rate_oracle_pairs:
msg = await RateCommand.oracle_rate_msg(pair)
self._notify("\nRate Oracle:\n" + msg)
config = ConfigVar(key="confirm_oracle_use",
type_str="bool",
prompt="Please confirm to proceed if the above oracle source and rates are correct for "
"this strategy (Yes/No) >>> ",
required_if=lambda: True,
validator=lambda v: validate_bool(v))
await self.prompt_a_config(config)
if config.value:
result = True
except OracleRateUnavailable:
self._notify("Oracle rate is not available.")
finally:
self.placeholder_mode = False
self.app.hide_input = False
self.app.change_prompt(prompt=">>> ")
return result
| 45.921053 | 119 | 0.611003 |
b3c33d1b637b44c75c6bbe77b57a5984d1767b04 | 1,342 | py | Python | Servus/plugins/system_ip_online/utils.py | sug4rok/Servus | 9840d0e275085c08b99fc7662eb22f2ab253d8f8 | ["MIT"] | null | null | null | Servus/plugins/system_ip_online/utils.py | sug4rok/Servus | 9840d0e275085c08b99fc7662eb22f2ab253d8f8 | ["MIT"] | null | null | null | Servus/plugins/system_ip_online/utils.py | sug4rok/Servus | 9840d0e275085c08b99fc7662eb22f2ab253d8f8 | ["MIT"] | null | null | null |
# coding=utf-8
import time
from subprocess import call
from os import devnull
from django.conf import settings
def subprocess_ping(host):
"""
Simple ping. Waits for the result of a single-packet ping.
:param host: str Network name or IP address of the device.
:returns: bool State (online/offline) of the network device.
"""
if settings.OS == 'linux':
command = ['ping', '-c', '1', '-n', '-W', '1', '-q', host]
elif settings.OS == 'windows':
command = ['ping', '-n', '1', '-4', '-w', '1', host]
else:
return None
return call(command, stdout=open(devnull, 'w'), stderr=open(devnull, 'w')) == 0
def triple_ping(host):
"""
Triple ping. Runs single-packet pings at a 5-second interval.
If the ping result is the same three times in a row, that result is returned.
:param host: str Network name or IP address of the device.
:returns: bool State (online/offline) of the network device.
"""
counter = 3
result = True
# Do three pings with a 5-second pause between them.
# If all three results match, treat the result as final.
while counter:
response = subprocess_ping(host)
if result == response:
counter -= 1
else:
result = response
counter = 2
time.sleep(5)
return result
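# --- illustrative usage (appended sketch, not part of the plugin) ---
# Requires Django settings to be configured (settings.OS set to 'linux' or
# 'windows'); the address below is a placeholder.
if __name__ == "__main__":
    host = '192.168.1.1'
    print('online' if triple_ping(host) else 'offline')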
| 26.84 | 83 | 0.628167 |
ae827cca3ebcfea241521ab13e501dcb9c120d31 | 5,074 | py | Python | tests/test_commodity.py | aslehigh/piecash | 797aca5abd08b686e5d47f077b00a095fb4804ed | ["MIT"] | null | null | null | tests/test_commodity.py | aslehigh/piecash | 797aca5abd08b686e5d47f077b00a095fb4804ed | ["MIT"] | null | null | null | tests/test_commodity.py | aslehigh/piecash | 797aca5abd08b686e5d47f077b00a095fb4804ed | ["MIT"] | null | null | null |
# coding=utf-8
from __future__ import unicode_literals
from datetime import datetime, date
from decimal import Decimal
import pytest
from piecash import Price, Commodity, GnucashException
from piecash.core.commodity import GncPriceError
from test_helper import db_sqlite_uri, db_sqlite, new_book, new_book_USD, book_uri, book_basic, is_not_on_web, is_inmemory_sqlite
# dummy line to avoid removing unused symbols
a = db_sqlite_uri, db_sqlite, new_book, new_book_USD, book_uri, book_basic
class TestCommodity_create_commodity(object):
def test_create_commodity(self, book_basic):
assert len(book_basic.commodities) == 2
cdty = Commodity(namespace="AMEX", mnemonic="APPLE", fullname="Apple", book=book_basic)
book_basic.flush()
assert len(book_basic.commodities) == 3
with pytest.raises(GnucashException):
cdty.base_currency
cdty["quoted_currency"] = "EUR"
assert cdty.base_currency == book_basic.commodities(mnemonic="EUR")
def test_base_currency_commodity(self, book_basic):
cdty = Commodity(namespace="AMEX", mnemonic="APPLE", fullname="Apple", book=book_basic)
with pytest.raises(GnucashException):
cdty.base_currency
# should trigger creation of USD currency
cdty["quoted_currency"] = "USD"
assert cdty.base_currency.mnemonic == 'USD'
book_basic.flush()
assert cdty.base_currency == book_basic.currencies(mnemonic="USD")
cdty["quoted_currency"] = "EUR"
assert cdty.base_currency == book_basic.currencies(mnemonic="EUR")
def test_base_currency_commodity_no_book(self, book_basic):
cdty = Commodity(namespace="AMEX", mnemonic="APPLE", fullname="Apple")
with pytest.raises(GnucashException):
cdty.base_currency
def test_base_currency_currency(self, book_basic):
cdty = book_basic.currencies(mnemonic="USD")
assert cdty.base_currency.mnemonic == "EUR"
class TestCommodity_create_prices(object):
def test_create_basicprice(self, book_basic):
EUR = book_basic.commodities(namespace="CURRENCY")
USD = book_basic.currencies(mnemonic="USD")
p = Price(commodity=USD, currency=EUR, date=date(2014, 2, 22), value=Decimal('0.54321'))
# check price exist
np = USD.prices.first()
assert np is p
assert repr(p) == "Price<2014-02-22 : 0.54321 EUR/USD>"
p2 = Price(commodity=USD, currency=EUR, date=date(2014, 2, 21), value=Decimal('0.12345'))
book_basic.flush()
assert p.value + p2.value == Decimal("0.66666")
assert len(USD.prices.all()) == 2
def test_create_duplicateprice(self, book_basic):
EUR = book_basic.commodities(namespace="CURRENCY")
USD = book_basic.currencies(mnemonic="USD")
p = Price(commodity=USD, currency=EUR, date=date(2014, 2, 22), value=Decimal('0.54321'))
p1 = Price(commodity=USD, currency=EUR, date=date(2014, 2, 22), value=Decimal('0.12345'))
book_basic.flush()
assert USD.prices.filter_by(value=Decimal('0')).all() == []
assert USD.prices.filter_by(value=Decimal('0.12345')).one() == p1
from sqlalchemy.orm.exc import NoResultFound
with pytest.raises(NoResultFound):
USD.prices.filter_by(value=Decimal('0.123')).one()
def test_update_currency_prices(self, book_basic):
if not is_inmemory_sqlite(book_basic) or is_not_on_web():
print("skipping test for {}".format(book_basic))
return
EUR = book_basic.default_currency
with pytest.raises(GncPriceError):
EUR.update_prices()
USD = book_basic.currencies(mnemonic="USD")
USD.update_prices()
assert len(list(USD.prices)) < 7
assert (USD.prices.first() is None) or (USD.prices.first().commodity is USD)
CAD = book_basic.currencies(mnemonic="CAD")
CAD.update_prices()
assert len(list(CAD.prices)) < 7
assert (CAD.prices.first() is None) or (CAD.prices.first().commodity is CAD)
# redo update prices which should not bring new prices
l = len(list(USD.prices))
USD.update_prices()
assert len(list(USD.prices)) == l
assert len(book_basic.prices) < 14
def test_update_stock_prices(self, book_basic):
if not is_inmemory_sqlite(book_basic) or is_not_on_web():
print("skipping test for {}".format(book_basic))
return
cdty = Commodity(mnemonic="AAPL", namespace="NASDAQ", fullname="Apple", book=book_basic)
cdty["quoted_currency"] = "USD"
assert cdty.get("quoted_currency") == "USD"
cdty.update_prices()
book_basic.flush()
assert len(list(cdty.prices)) < 7
cdty.update_prices()
assert len(list(cdty.prices)) < 7
def test_price_update_on_commodity_no_book(self, book_basic):
cdty = Commodity(namespace="AMEX", mnemonic="APPLE", fullname="Apple")
with pytest.raises(GncPriceError):
cdty.update_prices()
| 37.865672 | 129 | 0.668112 |
f1dcb962e89fd9320e3797996e2fdb28b0f6ac01 | 950 | py | Python | ow/tests/models/test_appmaker.py | openworkouts/OpenWorkouts | ecfed69e8c654c09bb8c074d8aedda9c13cd2235 | ["BSD-3-Clause"] | 3 | 2019-02-15T11:38:20.000Z | 2020-10-03T19:03:51.000Z | ow/tests/models/test_appmaker.py | openworkouts/OpenWorkouts | ecfed69e8c654c09bb8c074d8aedda9c13cd2235 | ["BSD-3-Clause"] | null | null | null | ow/tests/models/test_appmaker.py | openworkouts/OpenWorkouts | ecfed69e8c654c09bb8c074d8aedda9c13cd2235 | ["BSD-3-Clause"] | null | null | null |
from unittest.mock import patch
from ow.models import appmaker
from ow.models.root import OpenWorkouts
class TestAppMaker(object):
@patch('ow.models.transaction')
def test_appmaker(self, t):
"""
Calling appmaker on a new zodb (without an OpenWorkouts root folder in
it), a new root object is added, the transaction is committed to the
zodb and the new root object is returned
"""
zodb_root = {}
app = appmaker(zodb_root)
assert isinstance(app, OpenWorkouts)
assert t.commit.called
@patch('ow.models.transaction')
def test_appmaker_already_existing_root(self, t):
"""
Calling appmaker with a zodb that has an OpenWorkouts root, nothing
changes in that zodb
"""
zodb_root = {'app_root': 'faked-root-object'}
app = appmaker(zodb_root)
assert app == 'faked-root-object'
assert not t.commit.called
| 30.645161 | 78 | 0.648421 |
6d782ce507c6391143889f2e7699a7c504626614 | 171 | py | Python | django_dump_load_utf8/apps.py | panhaoyu/django-dump-load-utf8 | 12ac58da29e2498003be0a5cf910832db347d81f | ["MIT"] | 1 | 2021-09-30T11:38:19.000Z | 2021-09-30T11:38:19.000Z | django_dump_load_utf8/apps.py | panhaoyu/django-dump-load-utf8 | 12ac58da29e2498003be0a5cf910832db347d81f | ["MIT"] | null | null | null | django_dump_load_utf8/apps.py | panhaoyu/django-dump-load-utf8 | 12ac58da29e2498003be0a5cf910832db347d81f | ["MIT"] | null | null | null |
from django.apps import AppConfig
class DjangoDumpLoadUtf8Config(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'django_dump_load_utf8'
| 24.428571 | 56 | 0.795322 |
fadd37510f2e1cc4f09e0008b31a69f48a27f767 | 1,506 | py | Python | api/api.py | Engin-Boot/alert-to-care-s22b12 | b5e6c225b476cd84b4f75175835769b01794040d | ["MIT"] | null | null | null | api/api.py | Engin-Boot/alert-to-care-s22b12 | b5e6c225b476cd84b4f75175835769b01794040d | ["MIT"] | 1 | 2022-03-02T09:57:41.000Z | 2022-03-02T09:57:41.000Z | api/api.py | Engin-Boot/alert-to-care-s22b12 | b5e6c225b476cd84b4f75175835769b01794040d | ["MIT"] | null | null | null |
import flask
from flask_cors import CORS, cross_origin
from patients import Patient
from patientrepository import patientrepositry
from bedsrepository import BedsRepositry
from flask import jsonify
from flask import request
app = flask.Flask(__name__)
cors = CORS(app)
app.config['CORS_HEADERS'] = 'Content-Type'
app.config["DEBUG"] = True
@app.route('/', methods=['GET'])
def home():
return "<h1>Distant Reading Archive</h1>"
@app.route('/addBeds', methods=['POST'])
def addBedsRepository():
numberofbeds = request.json['numberofbeds']
message = BedsRepositry.addBeds(numberofbeds)
return jsonify(message)
@app.route('/patient', methods=['POST'])
def patiententry():
name = request.json['name']
age = request.json['age']
patientdetails = Patient(name, age)
message = patientrepositry.addPatient(patientdetails)
return jsonify(message)
@app.route('/alertonpatientstatus', methods=['GET'])
def alertonpatientstatus():
message = patientrepositry.patientCheckVitals()
return jsonify(message)
@app.route('/resetPatientStatus', methods=['POST'])
def ResetPatientStatus():
bedid = request.json['bedid']
message = patientrepositry.resetPatientVitals(bedid)
return jsonify(message)
@app.route('/dischargePatient', methods=['POST'])
def dischargePatient():
bedid = request.json['bedid']
message = patientrepositry.dischargePatient(bedid)
return jsonify(message)
app.run()
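# --- illustrative client calls (appended sketch; endpoints as defined above) ---
# With the server running on Flask's default http://127.0.0.1:5000 :
#   curl -X POST http://127.0.0.1:5000/addBeds -H "Content-Type: application/json" -d '{"numberofbeds": 5}'
#   curl -X POST http://127.0.0.1:5000/patient -H "Content-Type: application/json" -d '{"name": "Alice", "age": 42}'
#   curl http://127.0.0.1:5000/alertonpatientstatus
#   curl -X POST http://127.0.0.1:5000/dischargePatient -H "Content-Type: application/json" -d '{"bedid": 1}'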
| 25.525424 | 58 | 0.702523 |
3c3e5464079198ea3a4fbb9e9a0531db111a78f0 | 3,780 | py | Python | cloudrail/knowledge/context/aws/elb/load_balancer.py | my-devops-info/cloudrail-knowledge | b7c1bbd6fe1faeb79c105a01c0debbe24d031a0e | ["MIT"] | null | null | null | cloudrail/knowledge/context/aws/elb/load_balancer.py | my-devops-info/cloudrail-knowledge | b7c1bbd6fe1faeb79c105a01c0debbe24d031a0e | ["MIT"] | null | null | null | cloudrail/knowledge/context/aws/elb/load_balancer.py | my-devops-info/cloudrail-knowledge | b7c1bbd6fe1faeb79c105a01c0debbe24d031a0e | ["MIT"] | null | null | null |
from __future__ import annotations
from dataclasses import dataclass, field
from enum import Enum
from typing import List, Optional
from cloudrail.knowledge.context.aws.elb.load_balancer_attributes import LoadBalancerAttributes
from cloudrail.knowledge.context.aws.service_name import AwsServiceName
from cloudrail.knowledge.context.aws.networking_config.network_entity import NetworkEntity
from cloudrail.knowledge.context.aws.elb.load_balancer_target_group import LoadBalancerTargetGroup
class LoadBalancerSchemeType(Enum):
INTERNAL = 'internal'
INTERNET_FACING = 'internet-facing'
class LoadBalancerType(Enum):
NETWORK = 'network'
APPLICATION = 'application'
@dataclass
class LoadBalancerSubnetMapping:
allocation_id: str
private_ipv4_address: str
subnet_id: str
@dataclass
class LoadBalancerRawData:
subnets_ids: List[str] = field(default_factory=list)
security_groups_ids: List[str] = field(default_factory=list)
subnet_mapping: List[LoadBalancerSubnetMapping] = field(default_factory=list)
class LoadBalancer(NetworkEntity):
"""
Attributes:
name: The name of the load balancer.
scheme_type: The scheme type (internal or internet-facing).
load_balancer_type: The type of the load balancer (network or application).
load_balancer_arn: The ARN of the load balancer.
target_groups: The target groups associated with this LB.
listener_ports: The ports the listeners associated with this LB are configured to.
"""
def __init__(self, account: str, region: str, name: str, scheme_type: LoadBalancerSchemeType,
load_balancer_type: LoadBalancerType, load_balancer_arn: str):
super().__init__(name, account, region, AwsServiceName.AWS_LOAD_BALANCER)
self.scheme_type: LoadBalancerSchemeType = scheme_type
self.load_balancer_type: LoadBalancerType = load_balancer_type
self.load_balancer_arn: str = load_balancer_arn
self.target_groups: List[LoadBalancerTargetGroup] = []
self.listener_ports: List[int] = []
self.raw_data = LoadBalancerRawData()
self.load_balancer_attributes: Optional[LoadBalancerAttributes] = None
def get_keys(self) -> List[str]:
return [self.load_balancer_arn]
def get_name(self) -> str:
return self.name
def get_id(self) -> str:
pass
def get_arn(self) -> str:
return self.load_balancer_arn
def get_extra_data(self) -> str:
load_balancer_type = 'type: {}'.format(self.load_balancer_type) if self.load_balancer_type else ''
subnet_ids = 'subnet_ids: {}'.format(self.network_resource.subnet_ids) if self.network_resource.subnet_ids else ''
security_group_ids = 'security_group_ids: {}'.format(self.network_resource.security_groups_ids)
return ', '.join([load_balancer_type, subnet_ids, security_group_ids])
def with_raw_data(self, subnets_ids: List[str] = None, security_groups_ids: List[str] = None, subnet_mapping: List[dict] = None) -> LoadBalancer:
subnet_mapping = subnet_mapping or []
self.raw_data = LoadBalancerRawData(subnets_ids or [], security_groups_ids or [],
[LoadBalancerSubnetMapping(x.get('allocation_id'),
x.get('private_ipv4_address'),
x['subnet_id']) for x in subnet_mapping])
return self
def get_cloud_resource_url(self) -> str:
return '{0}ec2/v2/home?region={1}#LoadBalancers' \
.format(self.AWS_CONSOLE_URL, self.region)
@property
def is_tagable(self) -> bool:
return True
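# --- illustrative construction (appended sketch, not part of the module) ---
# All identifiers below are placeholders; assumes NetworkEntity needs no
# arguments beyond those passed along by LoadBalancer.__init__.
if __name__ == "__main__":
    alb = LoadBalancer(
        account="123456789012",
        region="us-east-1",
        name="web-alb",
        scheme_type=LoadBalancerSchemeType.INTERNET_FACING,
        load_balancer_type=LoadBalancerType.APPLICATION,
        load_balancer_arn="arn:aws:elasticloadbalancing:us-east-1:123456789012:loadbalancer/app/web-alb/0000000000000000",
    ).with_raw_data(subnets_ids=["subnet-aaa", "subnet-bbb"], security_groups_ids=["sg-ccc"])
    print(alb.get_arn(), alb.scheme_type.value, alb.load_balancer_type.value)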
| 41.538462 | 149 | 0.691534 |
c637b956a25d9528baf9149e323427291bb4c20a | 35,620 | py | Python | salt/modules/zfs.py | springborland/salt | bee85e477d57e9a171884e54fefb9a59d0835ed0 | ["Apache-2.0"] | 1 | 2021-09-06T00:14:04.000Z | 2021-09-06T00:14:04.000Z | salt/modules/zfs.py | springborland/salt | bee85e477d57e9a171884e54fefb9a59d0835ed0 | ["Apache-2.0"] | 2 | 2021-04-30T21:17:57.000Z | 2021-12-13T20:40:23.000Z | salt/modules/zfs.py | springborland/salt | bee85e477d57e9a171884e54fefb9a59d0835ed0 | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
"""
Module for running ZFS command
:codeauthor: Nitin Madhok <nmadhok@clemson.edu>, Jorge Schrauwen <sjorge@blackdot.be>
:maintainer: Jorge Schrauwen <sjorge@blackdot.be>
:maturity: new
:depends: salt.utils.zfs
:platform: illumos,freebsd,linux
.. versionchanged:: 2018.3.1
Big refactor to remove duplicate code, better type conversions and improved
consistency in output.
"""
from __future__ import absolute_import, print_function, unicode_literals
# Import Python libs
import logging
import salt.modules.cmdmod
# Import Salt libs
import salt.utils.args
import salt.utils.path
import salt.utils.versions
from salt.ext.six.moves import zip
from salt.utils.odict import OrderedDict
__virtualname__ = "zfs"
log = logging.getLogger(__name__)
# Function alias to set mapping.
__func_alias__ = {
"list_": "list",
}
def __virtual__():
"""
Only load when the platform has zfs support
"""
if __grains__.get("zfs_support"):
return __virtualname__
else:
return False, "The zfs module cannot be loaded: zfs not supported"
def exists(name, **kwargs):
"""
Check if a ZFS filesystem or volume or snapshot exists.
name : string
name of dataset
type : string
also check if dataset is of a certain type, valid choices are:
filesystem, snapshot, volume, bookmark, or all.
.. versionadded:: 2015.5.0
CLI Example:
.. code-block:: bash
salt '*' zfs.exists myzpool/mydataset
salt '*' zfs.exists myzpool/myvolume type=volume
"""
## Configure command
# NOTE: initialize the defaults
opts = {}
# NOTE: set extra config from kwargs
if kwargs.get("type", False):
opts["-t"] = kwargs.get("type")
## Check if 'name' of 'type' exists
res = __salt__["cmd.run_all"](
__utils__["zfs.zfs_command"](command="list", opts=opts, target=name,),
python_shell=False,
ignore_retcode=True,
)
return res["retcode"] == 0
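# --- illustrative in-Python call (appended note; mirrors the CLI example above) ---
# From another Salt execution module or state, this function is reached through
# the __salt__ dunder, e.g. __salt__["zfs.exists"]("myzpool/mydataset", type="filesystem"),
# which runs `zfs list -t filesystem myzpool/mydataset` and returns True on a zero retcode.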
def create(name, **kwargs):
"""
Create a ZFS File System.
name : string
name of dataset or volume
volume_size : string
if specified, a zvol will be created instead of a dataset
sparse : boolean
create sparse volume
create_parent : boolean
creates all the non-existing parent datasets. any property specified on the
command line using the -o option is ignored.
properties : dict
additional zfs properties (-o)
.. note::
ZFS properties can be specified at the time of creation of the filesystem by
passing an additional argument called "properties" and specifying the properties
with their respective values in the form of a python dictionary::
properties="{'property1': 'value1', 'property2': 'value2'}"
.. versionadded:: 2015.5.0
CLI Example:
.. code-block:: bash
salt '*' zfs.create myzpool/mydataset [create_parent=True|False]
salt '*' zfs.create myzpool/mydataset properties="{'mountpoint': '/export/zfs', 'sharenfs': 'on'}"
salt '*' zfs.create myzpool/volume volume_size=1G [sparse=True|False]`
salt '*' zfs.create myzpool/volume volume_size=1G properties="{'volblocksize': '512'}" [sparse=True|False]
"""
## Configure command
# NOTE: initialize the defaults
flags = []
opts = {}
# NOTE: push filesystem properties
filesystem_properties = kwargs.get("properties", {})
# NOTE: set extra config from kwargs
if kwargs.get("create_parent", False):
flags.append("-p")
if kwargs.get("sparse", False) and kwargs.get("volume_size", None):
flags.append("-s")
if kwargs.get("volume_size", None):
opts["-V"] = __utils__["zfs.to_size"](
kwargs.get("volume_size"), convert_to_human=False
)
## Create filesystem/volume
res = __salt__["cmd.run_all"](
__utils__["zfs.zfs_command"](
command="create",
flags=flags,
opts=opts,
filesystem_properties=filesystem_properties,
target=name,
),
python_shell=False,
)
return __utils__["zfs.parse_command_result"](res, "created")
def destroy(name, **kwargs):
"""
Destroy a ZFS File System.
name : string
name of dataset, volume, or snapshot
force : boolean
force an unmount of any file systems using the unmount -f command.
recursive : boolean
recursively destroy all children. (-r)
recursive_all : boolean
recursively destroy all dependents, including cloned file systems
outside the target hierarchy. (-R)
.. warning::
watch out when using recursive and recursive_all
.. versionadded:: 2015.5.0
CLI Example:
.. code-block:: bash
salt '*' zfs.destroy myzpool/mydataset [force=True|False]
"""
## Configure command
# NOTE: initialize the defaults
flags = []
# NOTE: set extra config from kwargs
if kwargs.get("force", False):
flags.append("-f")
if kwargs.get("recursive_all", False):
flags.append("-R")
if kwargs.get("recursive", False):
flags.append("-r")
## Destroy filesystem/volume/snapshot/...
res = __salt__["cmd.run_all"](
__utils__["zfs.zfs_command"](command="destroy", flags=flags, target=name,),
python_shell=False,
)
return __utils__["zfs.parse_command_result"](res, "destroyed")
def rename(name, new_name, **kwargs):
"""
Rename or Relocate a ZFS File System.
name : string
name of dataset, volume, or snapshot
new_name : string
new name of dataset, volume, or snapshot
force : boolean
force unmount any filesystems that need to be unmounted in the process.
create_parent : boolean
creates all the nonexistent parent datasets. Datasets created in
this manner are automatically mounted according to the mountpoint
property inherited from their parent.
recursive : boolean
recursively rename the snapshots of all descendent datasets.
snapshots are the only dataset that can be renamed recursively.
.. versionadded:: 2015.5.0
CLI Example:
.. code-block:: bash
salt '*' zfs.rename myzpool/mydataset myzpool/renameddataset
"""
## Configure command
# NOTE: initialize the defaults
flags = []
target = []
# NOTE: set extra config from kwargs
if __utils__["zfs.is_snapshot"](name):
if kwargs.get("create_parent", False):
log.warning(
"zfs.rename - create_parent=True cannot be used with snapshots."
)
if kwargs.get("force", False):
log.warning("zfs.rename - force=True cannot be used with snapshots.")
if kwargs.get("recursive", False):
flags.append("-r")
else:
if kwargs.get("create_parent", False):
flags.append("-p")
if kwargs.get("force", False):
flags.append("-f")
if kwargs.get("recursive", False):
log.warning("zfs.rename - recursive=True can only be used with snapshots.")
# NOTE: update target
target.append(name)
target.append(new_name)
## Rename filesystem/volume/snapshot/...
res = __salt__["cmd.run_all"](
__utils__["zfs.zfs_command"](command="rename", flags=flags, target=target,),
python_shell=False,
)
return __utils__["zfs.parse_command_result"](res, "renamed")
def list_(name=None, **kwargs):
"""
Return a list of all datasets or a specified dataset on the system and the
values of their used, available, referenced, and mountpoint properties.
name : string
name of dataset, volume, or snapshot
recursive : boolean
recursively list children
depth : int
limit recursion to depth
properties : string
comma-separated list of properties to list, the name property will always be added
type : string
comma-separated list of types to display, where type is one of
filesystem, snapshot, volume, bookmark, or all.
sort : string
property to sort on (default = name)
order : string [ascending|descending]
sort order (default = ascending)
parsable : boolean
display numbers in parsable (exact) values
.. versionadded:: 2018.3.0
.. versionadded:: 2015.5.0
CLI Example:
.. code-block:: bash
salt '*' zfs.list
salt '*' zfs.list myzpool/mydataset [recursive=True|False]
salt '*' zfs.list myzpool/mydataset properties="sharenfs,mountpoint"
"""
ret = OrderedDict()
## update properties
# NOTE: properties should be a list
properties = kwargs.get("properties", "used,avail,refer,mountpoint")
if not isinstance(properties, list):
properties = properties.split(",")
# NOTE: name should be first property
# we loop here because 'name' can be in the list
# multiple times.
while "name" in properties:
properties.remove("name")
properties.insert(0, "name")
## Configure command
# NOTE: initialize the defaults
flags = ["-H"]
opts = {}
# NOTE: set extra config from kwargs
if kwargs.get("recursive", False):
flags.append("-r")
if kwargs.get("recursive", False) and kwargs.get("depth", False):
opts["-d"] = kwargs.get("depth")
if kwargs.get("type", False):
opts["-t"] = kwargs.get("type")
kwargs_sort = kwargs.get("sort", False)
if kwargs_sort and kwargs_sort in properties:
if kwargs.get("order", "ascending").startswith("a"):
opts["-s"] = kwargs_sort
else:
opts["-S"] = kwargs_sort
if isinstance(properties, list):
# NOTE: There can be only one -o and it takes a comma-separated list
opts["-o"] = ",".join(properties)
else:
opts["-o"] = properties
## parse zfs list
res = __salt__["cmd.run_all"](
__utils__["zfs.zfs_command"](
command="list", flags=flags, opts=opts, target=name,
),
python_shell=False,
)
if res["retcode"] == 0:
for ds in res["stdout"].splitlines():
if kwargs.get("parsable", True):
ds_data = __utils__["zfs.from_auto_dict"](
OrderedDict(list(zip(properties, ds.split("\t")))),
)
else:
ds_data = __utils__["zfs.to_auto_dict"](
OrderedDict(list(zip(properties, ds.split("\t")))),
convert_to_human=True,
)
ret[ds_data["name"]] = ds_data
del ret[ds_data["name"]]["name"]
else:
return __utils__["zfs.parse_command_result"](res)
return ret
def list_mount():
"""
List mounted zfs filesystems
.. versionadded:: 2018.3.1
CLI Example:
.. code-block:: bash
salt '*' zfs.list_mount
"""
## List mounted filesystem
res = __salt__["cmd.run_all"](
__utils__["zfs.zfs_command"](command="mount",), python_shell=False,
)
if res["retcode"] == 0:
ret = OrderedDict()
for mount in res["stdout"].splitlines():
mount = mount.split()
ret[mount[0]] = mount[-1]
return ret
else:
return __utils__["zfs.parse_command_result"](res)
def mount(name=None, **kwargs):
"""
Mounts ZFS file systems
name : string
name of the filesystem, having this set to None will mount all filesystems. (this is the default)
overlay : boolean
perform an overlay mount.
options : string
optional comma-separated list of mount options to use temporarily for
the duration of the mount.
.. versionadded:: 2016.3.0
.. versionchanged:: 2018.3.1
.. warning::
Passing '-a' as name is deprecated and will be removed in Sodium.
CLI Example:
.. code-block:: bash
salt '*' zfs.mount
salt '*' zfs.mount myzpool/mydataset
salt '*' zfs.mount myzpool/mydataset options=ro
"""
## Configure command
# NOTE: initialize the defaults
flags = []
opts = {}
# NOTE: set extra config from kwargs
if kwargs.get("overlay", False):
flags.append("-O")
if kwargs.get("options", False):
opts["-o"] = kwargs.get("options")
if name in [None, "-a"]:
# NOTE: the new way to mount all filesystems is to have name
# set to ```None```. We still accept the old '-a' until
# Sodium. After Sodium we can update the if statement
# to ```if not name:```
if name == "-a":
salt.utils.versions.warn_until(
"Sodium",
"Passing '-a' as name is deprecated as of Salt 2019.2.0. This "
"warning will be removed in Salt Sodium. Please pass name as "
"'None' instead to mount all filesystems.",
)
flags.append("-a")
name = None
## Mount filesystem
res = __salt__["cmd.run_all"](
__utils__["zfs.zfs_command"](
command="mount", flags=flags, opts=opts, target=name,
),
python_shell=False,
)
return __utils__["zfs.parse_command_result"](res, "mounted")
def unmount(name, **kwargs):
"""
Unmounts ZFS file systems
name : string
name of the filesystem, you can use None to unmount all mounted filesystems.
force : boolean
forcefully unmount the file system, even if it is currently in use.
.. warning::
Using ``-a`` for the name parameter will probably break your system, unless your rootfs is not on zfs.
.. versionadded:: 2016.3.0
.. versionchanged:: 2018.3.1
.. warning::
Passing '-a' as name is deprecated and will be removed in Sodium.
CLI Example:
.. code-block:: bash
salt '*' zfs.unmount myzpool/mydataset [force=True|False]
"""
## Configure command
# NOTE: initialize the defaults
flags = []
# NOTE: set extra config from kwargs
if kwargs.get("force", False):
flags.append("-f")
if name in [None, "-a"]:
# NOTE: still accept '-a' as name for backwards compatibility
# until Salt Sodium this should just simplify
# this to just set '-a' if name is not set.
flags.append("-a")
name = None
## Unmount filesystem
res = __salt__["cmd.run_all"](
__utils__["zfs.zfs_command"](command="unmount", flags=flags, target=name,),
python_shell=False,
)
return __utils__["zfs.parse_command_result"](res, "unmounted")
def inherit(prop, name, **kwargs):
"""
Clears the specified property
prop : string
name of property
name : string
name of the filesystem, volume, or snapshot
recursive : boolean
recursively inherit the given property for all children.
revert : boolean
revert the property to the received value if one exists; otherwise
operate as if the -S option was not specified.
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' zfs.inherit canmount myzpool/mydataset [recursive=True|False]
"""
## Configure command
# NOTE: initialize the defaults
flags = []
# NOTE: set extra config from kwargs
if kwargs.get("recursive", False):
flags.append("-r")
if kwargs.get("revert", False):
flags.append("-S")
## Inherit property
res = __salt__["cmd.run_all"](
__utils__["zfs.zfs_command"](
command="inherit", flags=flags, property_name=prop, target=name,
),
python_shell=False,
)
return __utils__["zfs.parse_command_result"](res, "inherited")
def diff(name_a, name_b=None, **kwargs):
"""
Display the difference between a snapshot of a given filesystem and
another snapshot of that filesystem from a later time or the current
contents of the filesystem.
name_a : string
name of snapshot
name_b : string
(optional) name of snapshot or filesystem
show_changetime : boolean
display the path's inode change time as the first column of output. (default = True)
show_indication : boolean
display an indication of the type of file. (default = True)
parsable : boolean
if true we don't parse the timestamp to a more readable date (default = True)
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' zfs.diff myzpool/mydataset@yesterday myzpool/mydataset
"""
## Configure command
# NOTE: initialize the defaults
flags = ["-H"]
target = []
# NOTE: set extra config from kwargs
if kwargs.get("show_changetime", True):
flags.append("-t")
if kwargs.get("show_indication", True):
flags.append("-F")
# NOTE: update target
target.append(name_a)
if name_b:
target.append(name_b)
## Diff filesystem/snapshot
res = __salt__["cmd.run_all"](
__utils__["zfs.zfs_command"](command="diff", flags=flags, target=target,),
python_shell=False,
)
if res["retcode"] != 0:
return __utils__["zfs.parse_command_result"](res)
else:
if not kwargs.get("parsable", True) and kwargs.get("show_changetime", True):
ret = OrderedDict()
for entry in res["stdout"].splitlines():
entry = entry.split()
entry_timestamp = __utils__["dateutils.strftime"](
entry[0], "%Y-%m-%d.%H:%M:%S.%f"
)
entry_data = "\t\t".join(entry[1:])
ret[entry_timestamp] = entry_data
else:
ret = res["stdout"].splitlines()
return ret
def rollback(name, **kwargs):
"""
Roll back the given dataset to a previous snapshot.
name : string
name of snapshot
recursive : boolean
destroy any snapshots and bookmarks more recent than the one
specified.
recursive_all : boolean
destroy any more recent snapshots and bookmarks, as well as any
clones of those snapshots.
force : boolean
used with the -R option to force an unmount of any clone file
systems that are to be destroyed.
.. warning::
When a dataset is rolled back, all data that has changed since
the snapshot is discarded, and the dataset reverts to the state
at the time of the snapshot. By default, the command refuses to
roll back to a snapshot other than the most recent one.
In order to do so, all intermediate snapshots and bookmarks
must be destroyed by specifying the -r option.
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' zfs.rollback myzpool/mydataset@yesterday
"""
## Configure command
# NOTE: initialize the defaults
flags = []
# NOTE: set extra config from kwargs
if kwargs.get("recursive_all", False):
flags.append("-R")
if kwargs.get("recursive", False):
flags.append("-r")
if kwargs.get("force", False):
if kwargs.get("recursive_all", False) or kwargs.get("recursive", False):
flags.append("-f")
else:
log.warning(
"zfs.rollback - force=True can only be used with recursive_all=True or recursive=True"
)
## Rollback to snapshot
res = __salt__["cmd.run_all"](
__utils__["zfs.zfs_command"](command="rollback", flags=flags, target=name,),
python_shell=False,
)
return __utils__["zfs.parse_command_result"](res, "rolledback")
def clone(name_a, name_b, **kwargs):
"""
Creates a clone of the given snapshot.
name_a : string
name of snapshot
name_b : string
name of filesystem or volume
create_parent : boolean
creates all the non-existing parent datasets. any property specified on the
command line using the -o option is ignored.
properties : dict
additional zfs properties (-o)
.. note::
ZFS properties can be specified at the time of creation of the filesystem by
passing an additional argument called "properties" and specifying the properties
with their respective values in the form of a python dictionary::
properties="{'property1': 'value1', 'property2': 'value2'}"
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' zfs.clone myzpool/mydataset@yesterday myzpool/mydataset_yesterday
"""
## Configure command
# NOTE: initialize the defaults
flags = []
target = []
# NOTE: push filesystem properties
filesystem_properties = kwargs.get("properties", {})
# NOTE: set extra config from kwargs
if kwargs.get("create_parent", False):
flags.append("-p")
# NOTE: update target
target.append(name_a)
target.append(name_b)
## Clone filesystem/volume
res = __salt__["cmd.run_all"](
__utils__["zfs.zfs_command"](
command="clone",
flags=flags,
filesystem_properties=filesystem_properties,
target=target,
),
python_shell=False,
)
return __utils__["zfs.parse_command_result"](res, "cloned")
def promote(name):
"""
Promotes a clone file system to no longer be dependent on its "origin"
snapshot.
.. note::
This makes it possible to destroy the file system that the
clone was created from. The clone parent-child dependency relationship
is reversed, so that the origin file system becomes a clone of the
specified file system.
The snapshot that was cloned, and any snapshots previous to this
snapshot, are now owned by the promoted clone. The space they use moves
from the origin file system to the promoted clone, so enough space must
be available to accommodate these snapshots. No new space is consumed
by this operation, but the space accounting is adjusted. The promoted
clone must not have any conflicting snapshot names of its own. The
rename subcommand can be used to rename any conflicting snapshots.
name : string
name of clone-filesystem
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' zfs.promote myzpool/myclone
"""
## Promote clone
res = __salt__["cmd.run_all"](
__utils__["zfs.zfs_command"](command="promote", target=name,),
python_shell=False,
)
return __utils__["zfs.parse_command_result"](res, "promoted")
def bookmark(snapshot, bookmark):
"""
Creates a bookmark of the given snapshot
.. note::
Bookmarks mark the point in time when the snapshot was created,
and can be used as the incremental source for a zfs send command.
This feature must be enabled to be used. See zpool-features(5) for
details on ZFS feature flags and the bookmarks feature.
snapshot : string
name of snapshot to bookmark
bookmark : string
name of bookmark
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' zfs.bookmark myzpool/mydataset@yesterday myzpool/mydataset#complete
"""
# abort if we do not have feature flags
if not __utils__["zfs.has_feature_flags"]():
return OrderedDict([("error", "bookmarks are not supported")])
## Configure command
# NOTE: initialize the defaults
target = []
# NOTE: update target
target.append(snapshot)
target.append(bookmark)
## Bookmark snapshot
res = __salt__["cmd.run_all"](
__utils__["zfs.zfs_command"](command="bookmark", target=target,),
python_shell=False,
)
return __utils__["zfs.parse_command_result"](res, "bookmarked")
def holds(snapshot, **kwargs):
"""
Lists all existing user references for the given snapshot or snapshots.
snapshot : string
name of snapshot
recursive : boolean
lists the holds that are set on the named descendent snapshots also.
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' zfs.holds myzpool/mydataset@baseline
"""
## Configure command
# NOTE: initialize the defaults
flags = ["-H"]
target = []
# NOTE: set extra config from kwargs
if kwargs.get("recursive", False):
flags.append("-r")
# NOTE: update target
target.append(snapshot)
## Lookup holds
res = __salt__["cmd.run_all"](
__utils__["zfs.zfs_command"](command="holds", flags=flags, target=target,),
python_shell=False,
)
ret = __utils__["zfs.parse_command_result"](res)
if res["retcode"] == 0:
for hold in res["stdout"].splitlines():
hold_data = OrderedDict(
list(zip(["name", "tag", "timestamp"], hold.split("\t"),))
)
ret[hold_data["tag"].strip()] = hold_data["timestamp"]
return ret
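# Illustrative note (added for clarity; not part of the upstream module): the loop
# above splits each tab-separated `zfs holds -H` output line into
# (name, tag, timestamp) and keys the result on the tag, so a hypothetical line
#
#   myzpool/mydataset@baseline\tmytag\tThu Feb  1 12:00 2018
#
# ends up in the returned dict as ret['mytag'] = 'Thu Feb  1 12:00 2018'.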
def hold(tag, *snapshot, **kwargs):
"""
Adds a single reference, named with the tag argument, to the specified
snapshot or snapshots.
.. note::
Each snapshot has its own tag namespace, and tags must be unique within that space.
If a hold exists on a snapshot, attempts to destroy that snapshot by
using the zfs destroy command return EBUSY.
tag : string
name of tag
snapshot : string
name of snapshot(s)
recursive : boolean
specifies that a hold with the given tag is applied recursively to
the snapshots of all descendent file systems.
.. versionadded:: 2016.3.0
.. versionchanged:: 2018.3.1
.. warning::
As of 2018.3.1 the tag parameter no longer accepts a comma-separated value.
        It is now possible to create a tag that contains a comma; this was impossible before.
CLI Example:
.. code-block:: bash
salt '*' zfs.hold mytag myzpool/mydataset@mysnapshot [recursive=True]
salt '*' zfs.hold mytag myzpool/mydataset@mysnapshot myzpool/mydataset@myothersnapshot
"""
## warn about tag change
if "," in tag:
        salt.utils.versions.warn_until(
            "Sodium",
            "A comma-separated tag is no longer supported as of Salt 2018.3.1. "
            "This warning will be removed in Salt Sodium.",
)
## Configure command
# NOTE: initialize the defaults
flags = []
target = []
# NOTE: set extra config from kwargs
if kwargs.get("recursive", False):
flags.append("-r")
# NOTE: update target
target.append(tag)
target.extend(snapshot)
## hold snapshot
res = __salt__["cmd.run_all"](
__utils__["zfs.zfs_command"](command="hold", flags=flags, target=target,),
python_shell=False,
)
return __utils__["zfs.parse_command_result"](res, "held")
def release(tag, *snapshot, **kwargs):
"""
Removes a single reference, named with the tag argument, from the
specified snapshot or snapshots.
.. note::
The tag must already exist for each snapshot.
If a hold exists on a snapshot, attempts to destroy that
snapshot by using the zfs destroy command return EBUSY.
tag : string
name of tag
snapshot : string
name of snapshot(s)
recursive : boolean
recursively releases a hold with the given tag on the snapshots of
all descendent file systems.
.. versionadded:: 2016.3.0
.. versionchanged:: 2018.3.1
.. warning::
As of 2018.3.1 the tag parameter no longer accepts a comma-separated value.
        It is now possible to create a tag that contains a comma; this was impossible before.
CLI Example:
.. code-block:: bash
salt '*' zfs.release mytag myzpool/mydataset@mysnapshot [recursive=True]
salt '*' zfs.release mytag myzpool/mydataset@mysnapshot myzpool/mydataset@myothersnapshot
"""
## warn about tag change
if "," in tag:
        salt.utils.versions.warn_until(
            "Sodium",
            "A comma-separated tag is no longer supported as of Salt 2018.3.1. "
            "This warning will be removed in Salt Sodium.",
)
## Configure command
# NOTE: initialize the defaults
flags = []
target = []
# NOTE: set extra config from kwargs
if kwargs.get("recursive", False):
flags.append("-r")
# NOTE: update target
target.append(tag)
target.extend(snapshot)
## release snapshot
res = __salt__["cmd.run_all"](
__utils__["zfs.zfs_command"](command="release", flags=flags, target=target,),
python_shell=False,
)
return __utils__["zfs.parse_command_result"](res, "released")
def snapshot(*snapshot, **kwargs):
"""
Creates snapshots with the given names.
snapshot : string
name of snapshot(s)
recursive : boolean
recursively create snapshots of all descendent datasets.
properties : dict
additional zfs properties (-o)
.. note::
ZFS properties can be specified at the time of creation of the filesystem by
passing an additional argument called "properties" and specifying the properties
with their respective values in the form of a python dictionary::
properties="{'property1': 'value1', 'property2': 'value2'}"
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' zfs.snapshot myzpool/mydataset@yesterday [recursive=True]
salt '*' zfs.snapshot myzpool/mydataset@yesterday myzpool/myotherdataset@yesterday [recursive=True]
"""
## Configure command
# NOTE: initialize the defaults
flags = []
# NOTE: push filesystem properties
filesystem_properties = kwargs.get("properties", {})
# NOTE: set extra config from kwargs
if kwargs.get("recursive", False):
flags.append("-r")
## Create snapshot
res = __salt__["cmd.run_all"](
__utils__["zfs.zfs_command"](
command="snapshot",
flags=flags,
filesystem_properties=filesystem_properties,
target=list(snapshot),
),
python_shell=False,
)
return __utils__["zfs.parse_command_result"](res, "snapshotted")
def set(*dataset, **kwargs):
"""
Sets the property or list of properties to the given value(s) for each dataset.
dataset : string
name of snapshot(s), filesystem(s), or volume(s)
properties : string
additional zfs properties pairs
.. note::
properties are passed as key-value pairs. e.g.
compression=off
.. note::
Only some properties can be edited.
See the Properties section for more information on what properties
can be set and acceptable values.
Numeric values can be specified as exact values, or in a human-readable
form with a suffix of B, K, M, G, T, P, E (for bytes, kilobytes,
megabytes, gigabytes, terabytes, petabytes, or exabytes respectively).
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' zfs.set myzpool/mydataset compression=off
salt '*' zfs.set myzpool/mydataset myzpool/myotherdataset compression=off
salt '*' zfs.set myzpool/mydataset myzpool/myotherdataset compression=lz4 canmount=off
"""
## Configure command
# NOTE: push filesystem properties
filesystem_properties = salt.utils.args.clean_kwargs(**kwargs)
## Set property
res = __salt__["cmd.run_all"](
__utils__["zfs.zfs_command"](
command="set",
property_name=list(filesystem_properties.keys()),
property_value=list(filesystem_properties.values()),
target=list(dataset),
),
python_shell=False,
)
return __utils__["zfs.parse_command_result"](res, "set")
def get(*dataset, **kwargs):
"""
Displays properties for the given datasets.
dataset : string
name of snapshot(s), filesystem(s), or volume(s)
properties : string
comma-separated list of properties to list, defaults to all
recursive : boolean
recursively list children
depth : int
recursively list children to depth
fields : string
        comma-separated list of fields to include; the name and property fields are always added
type : string
comma-separated list of types to display, where type is one of
filesystem, snapshot, volume, bookmark, or all.
source : string
comma-separated list of sources to display. Must be one of the following:
local, default, inherited, temporary, and none. The default value is all sources.
parsable : boolean
display numbers in parsable (exact) values (default = True)
.. versionadded:: 2018.3.0
.. note::
If no datasets are specified, then the command displays properties
for all datasets on the system.
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' zfs.get
salt '*' zfs.get myzpool/mydataset [recursive=True|False]
salt '*' zfs.get myzpool/mydataset properties="sharenfs,mountpoint" [recursive=True|False]
salt '*' zfs.get myzpool/mydataset myzpool/myotherdataset properties=available fields=value depth=1
"""
## Configure command
# NOTE: initialize the defaults
flags = ["-H"]
opts = {}
# NOTE: set extra config from kwargs
if kwargs.get("depth", False):
opts["-d"] = kwargs.get("depth")
elif kwargs.get("recursive", False):
flags.append("-r")
fields = kwargs.get("fields", "value,source").split(",")
if "name" in fields: # ensure name is first
fields.remove("name")
if "property" in fields: # ensure property is second
fields.remove("property")
fields.insert(0, "name")
fields.insert(1, "property")
opts["-o"] = ",".join(fields)
if kwargs.get("type", False):
opts["-t"] = kwargs.get("type")
if kwargs.get("source", False):
opts["-s"] = kwargs.get("source")
# NOTE: set property_name
property_name = kwargs.get("properties", "all")
## Get properties
res = __salt__["cmd.run_all"](
__utils__["zfs.zfs_command"](
command="get",
flags=flags,
opts=opts,
property_name=property_name,
target=list(dataset),
),
python_shell=False,
)
ret = __utils__["zfs.parse_command_result"](res)
if res["retcode"] == 0:
for ds in res["stdout"].splitlines():
ds_data = OrderedDict(list(zip(fields, ds.split("\t"))))
if "value" in ds_data:
if kwargs.get("parsable", True):
ds_data["value"] = __utils__["zfs.from_auto"](
ds_data["property"], ds_data["value"],
)
else:
ds_data["value"] = __utils__["zfs.to_auto"](
ds_data["property"], ds_data["value"], convert_to_human=True,
)
if ds_data["name"] not in ret:
ret[ds_data["name"]] = OrderedDict()
ret[ds_data["name"]][ds_data["property"]] = ds_data
del ds_data["name"]
del ds_data["property"]
return ret
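# Illustrative note (added for clarity; not part of the upstream module): for a
# hypothetical call such as
#
#   __salt__['zfs.get']('myzpool/mydataset', properties='quota', fields='value', depth=1)
#
# the option handling above would assemble flags=['-H'] and
# opts={'-d': 1, '-o': 'name,property,value'}, and the parsed return value would be
# nested as ret['myzpool/mydataset']['quota'] = {'value': ...}.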
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
| 29.220673
| 114
| 0.620831
|
d995043703709e4b817076bdaffc5cf439c58a53
| 1,096
|
py
|
Python
|
examples/example_auto_scan.py
|
mauricioAyllon/NASA-gamma
|
14f53a626096d2bf0fce811608c9e59a8d6b5287
|
[
"MIT"
] | 5
|
2020-12-03T21:45:43.000Z
|
2021-12-04T16:14:33.000Z
|
examples/example_auto_scan.py
|
mauricioAyllon/NASA-gamma
|
14f53a626096d2bf0fce811608c9e59a8d6b5287
|
[
"MIT"
] | null | null | null |
examples/example_auto_scan.py
|
mauricioAyllon/NASA-gamma
|
14f53a626096d2bf0fce811608c9e59a8d6b5287
|
[
"MIT"
] | 3
|
2020-12-03T20:58:44.000Z
|
2022-03-24T04:52:57.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 29 12:14:38 2020
@author: mauricio
Example using auto_scan
"""
from nasagamma import spectrum as sp
import numpy as np
import pandas as pd
from nasagamma import peaksearch as ps
from nasagamma import peakfit as pf
# dataset 1
file = "data/SSR-mcnp.hdf"
df = pd.read_hdf(file, key="data")
df = df.iloc[1:, :]
cts_np = df.cts.to_numpy() * 1e8
erg = np.array(df.index)
chan = np.arange(0, len(cts_np), 1)
# Required input parameters (in channels)
fwhm_at_0 = 1.0
ref_fwhm = 31
ref_x = 1220
min_snr = 1
# instantiate a Spectrum object
spect = sp.Spectrum(counts=cts_np)
# peaksearch class
search = ps.PeakSearch(spect, ref_x, ref_fwhm, fwhm_at_0, min_snr=min_snr)
## plot peak positions (channels)
search.plot_peaks()
## auto_scan
ranges_m = [
[8, 18],
[32, 44],
[72, 88],
[93, 101],
[127, 157],
[174, 193],
[277, 315],
[320, 353],
[414, 445],
[500, 698],
[700, 787],
[852, 902],
[986, 1035],
[1086, 1450],
]
peak_lst = pf.auto_scan(search, xlst=ranges_m, plot=False, save_to_hdf=False)
| 18.896552
| 77
| 0.65146
|
11c282ce5635d5fe214033ff058b7cb074b65973
| 629
|
py
|
Python
|
manage.py
|
hairinhi/exanctify
|
db4442f299c3d5c4e03c38e43b1399b5c53dd57f
|
[
"MIT"
] | null | null | null |
manage.py
|
hairinhi/exanctify
|
db4442f299c3d5c4e03c38e43b1399b5c53dd57f
|
[
"MIT"
] | null | null | null |
manage.py
|
hairinhi/exanctify
|
db4442f299c3d5c4e03c38e43b1399b5c53dd57f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'exanctify.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 28.590909
| 73
| 0.683625
|
62294cd1d36b0797fbaee8a379e64b5399241f60
| 4,824
|
py
|
Python
|
Spectral Clustering/code_material_python/helper.py
|
AmineKheldouni/Graphs-Machine-Learning
|
1b34ef38516d46e8ca61b1a8093e6c8fb76fe031
|
[
"MIT"
] | 1
|
2019-02-17T12:40:53.000Z
|
2019-02-17T12:40:53.000Z
|
Spectral Clustering/code_material_python/helper.py
|
AmineKheldouni/Graphs-Machine-Learning
|
1b34ef38516d46e8ca61b1a8093e6c8fb76fe031
|
[
"MIT"
] | null | null | null |
Spectral Clustering/code_material_python/helper.py
|
AmineKheldouni/Graphs-Machine-Learning
|
1b34ef38516d46e8ca61b1a8093e6c8fb76fe031
|
[
"MIT"
] | null | null | null |
# Prim's maximal spanning tree algorithm
# Prim's alg idea:
# start at any node, find closest neighbor and mark edges
# for all remaining nodes, find closest to previous cluster, mark edge
# continue until no nodes remain
#
# INPUTS: graph defined by adjacency matrix, nxn
# OUTPUTS: matrix specifying maximum spanning tree (subgraph), nxn
import matplotlib.pyplot as plt
import scipy
import numpy as np
import networkx as nx
#
# Other routines used: isConnected.m
# GB: Oct 7, 2012
#Copyright (c) 2013, Massachusetts Institute of Technology. All rights
#reserved. Redistribution and use in source and binary forms, with or without
#modification, are permitted provided that the following conditions are met:
#- Redistributions of source code must retain the above copyright notice, this
#list of conditions and the following disclaimer.
#- Redistributions in binary
#form must reproduce the above copyright notice, this list of conditions and
#the following disclaimer in the documentation and/or other materials provided
#with the distribution.
#- Neither the name of the Massachusetts Institute of
#Technology nor the names of its contributors may be used to endorse or promote
#products derived from this software without specific prior written permission.
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
#AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
#IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
#DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
#FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
#DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
#SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
#CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
#OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
#OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
def is_connected(adj,n):
# Uses the fact that multiplying the adj matrix to itself k times give the
    # number of ways to get from i to j in k steps. If, at the end of these
    # multiplications, the sum of all the matrices still has 0 entries then the
# graph is disconnected. Computationally intensive, but can be sped up by
# the fact that in practice the diameter is very short compared to n, so it
# will terminate in order of log(n)? steps.
adjn=np.zeros((n,n))
adji=adj.copy()
for i in range(n):
adjn+=adji
adji=adji.dot(adj)
return len(np.where(adjn == 0)[0])==0
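# Worked example (added for clarity; not part of the original helper module): for the
# 3-node path graph
#
#   adj = np.array([[0, 1, 0],
#                   [1, 0, 1],
#                   [0, 1, 0]])
#
# the loop accumulates adj + adj^2 + adj^3 = [[1, 3, 1], [3, 2, 3], [1, 3, 1]],
# which has no zero entries, so is_connected(adj, 3) returns True.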
def max_span_tree(adj):
n=adj.shape[0]
if not(is_connected(adj,n)):
print('This graph is not connected. No spanning tree exists')
else:
tr=np.zeros((n,n))
adj[adj==0]=-np.inf
conn_nodes = [0]
rem_nodes = [i+1 for i in range(n-1)]
while len(rem_nodes)>0:
L=np.zeros(n)
L[conn_nodes]=1
L=L.reshape(n,1)
C=np.zeros(n)
C[rem_nodes]=1
C=C.reshape(1,n)
B=L.dot(C)
A=B*adj
i=np.where(A==np.max(A))[0][0]
j=np.where(A==np.max(A))[1][0]
tr[i,j]=1
tr[j,i]=1
conn_nodes+=[j]
rem_nodes.remove(j)
return tr.astype(int)
def plot_edges_and_points(X,Y,W,title=''):
colors=['go-','ro-','co-','ko-','yo-','mo-']
n=len(X)
G=nx.from_numpy_matrix(W)
nx.draw_networkx_edges(G,X)
for i in range(n):
plt.plot(X[i,0],X[i,1],colors[int(Y[i])])
plt.title(title)
plt.axis('equal')
def plot_graph_matrix(X,Y,W):
plt.figure()
plt.clf()
plt.subplot(1,2,1)
plot_edges_and_points(X,Y,W)
plt.subplot(1,2,2)
plt.imshow(W, extent=[0, 1, 0, 1])
plt.show()
def plot_clustering_result(X,Y,W,spectral_labels,kmeans_labels,normalized_switch=0):
plt.figure()
plt.clf()
plt.subplot(1,3,1)
plot_edges_and_points(X,Y,W,'ground truth')
plt.subplot(1,3,2)
if normalized_switch:
plot_edges_and_points(X,spectral_labels,W,'unnormalized laplacian')
else:
plot_edges_and_points(X,spectral_labels,W,'spectral clustering')
plt.subplot(1,3,3)
if normalized_switch:
plot_edges_and_points(X,kmeans_labels,W,'normalized laplacian')
else:
plot_edges_and_points(X,kmeans_labels,W,'k-means')
plt.show()
def plot_the_bend(X, Y, W, spectral_labels, eigenvalues_sorted):
plt.figure()
plt.clf()
plt.subplot(1,3,1)
plot_edges_and_points(X,Y,W,'ground truth')
plt.subplot(1,3,2);
plot_edges_and_points(X,spectral_labels,W,'spectral clustering')
plt.subplot(1,3,3);
plt.plot(np.arange(0,len(eigenvalues_sorted),1),eigenvalues_sorted,'v:')
plt.show()
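# Minimal usage sketch (added for illustration; not part of the original helper
# module). It builds a small, fully weighted 3-node graph and extracts its maximum
# spanning tree with max_span_tree() defined above; the weights are arbitrary and
# strictly positive so that the zero-masking inside max_span_tree() is a no-op.
if __name__ == "__main__":
    demo_adj = np.array([[1.0, 2.0, 1.5],
                         [2.0, 1.0, 3.0],
                         [1.5, 3.0, 1.0]])
    # Expected result: the two heaviest edges (0, 1) and (1, 2) form the tree.
    print(max_span_tree(demo_adj.copy()))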
| 36.270677
| 84
| 0.692164
|
2cff9d70975faf0d77c153dce33b893b99d9a21e
| 4,363
|
py
|
Python
|
nebula2/sclient/ScanResult.py
|
defp/nebula-python
|
425f4c6fcc836e3154326708b318ccda0753672f
|
[
"Apache-2.0"
] | null | null | null |
nebula2/sclient/ScanResult.py
|
defp/nebula-python
|
425f4c6fcc836e3154326708b318ccda0753672f
|
[
"Apache-2.0"
] | null | null | null |
nebula2/sclient/ScanResult.py
|
defp/nebula-python
|
425f4c6fcc836e3154326708b318ccda0753672f
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# --coding:utf-8--
# Copyright (c) 2020 vesoft inc. All rights reserved.
#
# This source code is licensed under Apache 2.0 License,
# attached with Common Clause Condition 1.0, found in the LICENSES directory.
import concurrent
import logging
from nebula2.sclient import (
PartManager,
do_scan_job,
PartInfo
)
from nebula2.sclient.BaseResult import (
BaseResult,
VertexData,
EdgeData
)
class VertexResult(BaseResult):
def __init__(self, data_sets, decode_type='utf-8'):
super().__init__(data_sets=data_sets,
decode_type=decode_type,
is_vertex=True)
def as_nodes(self):
"""
as_nodes
:return: list<Node>
"""
nodes = []
for data_set in self._data_sets:
for row in data_set.rows:
vertex_data = VertexData(row,
data_set.column_names,
self._decode_type)
nodes.append(vertex_data.as_node())
return nodes
class EdgeResult(BaseResult):
def __init__(self, data_sets: list, decode_type='utf-8'):
super().__init__(data_sets=data_sets,
decode_type=decode_type,
is_vertex=False)
def as_relationships(self):
"""
as_relationships
:return: list<Relationship>
"""
relationships = []
for data_set in self._data_sets:
for row in data_set.rows:
edge_data = EdgeData(row,
data_set.column_names,
self._decode_type)
relationships.append(edge_data.as_relationship())
return relationships
class ScanResult(object):
def __init__(self,
graph_storage_client,
req,
part_addrs,
partial_success=False,
is_vertex=True,
decode_type='utf-8'):
self._is_vertex = is_vertex
self._decode_type = decode_type
self._data_sets = []
self._graph_storage_client = graph_storage_client
self._partial_success = partial_success
self._req = req
part_infos = {}
for part_id in part_addrs.keys():
part_infos[part_id] = PartInfo(part_id, part_addrs[part_id])
self._parts_manager = PartManager(part_infos)
def has_next(self):
return self._parts_manager.has_next()
def next(self):
conns = self._graph_storage_client.get_conns()
num = len(conns)
logging.debug('Graph storage client num: {}'.format(num))
exceptions = []
result = []
with concurrent.futures.ThreadPoolExecutor(num) as executor:
do_scan = []
for i, conn in enumerate(conns):
future = executor.submit(do_scan_job,
conns[i],
self._parts_manager,
self._req,
self._is_vertex,
self._partial_success)
do_scan.append(future)
for future in concurrent.futures.as_completed(do_scan):
if future.exception() is not None:
logging.error(future.exception())
exceptions.append(future.exception())
else:
ret, data_sets = future.result()
if ret is not None:
logging.error('Scan failed: {}'.format(ret))
exceptions.append(RuntimeError('Scan failed: {}'.format(ret)))
continue
if len(data_sets) != 0:
result.extend(data_sets)
self._parts_manager.reset_jobs()
if len(exceptions) == 0:
if len(result) == 0:
logging.warning('Get empty result')
return None
else:
if self._is_vertex:
return VertexResult(result, self._decode_type)
else:
return EdgeResult(result, self._decode_type)
else:
raise exceptions[0]
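# Hedged usage sketch (added for illustration; not part of the upstream client code).
# A ScanResult is typically drained by polling has_next()/next() until the partition
# manager reports every partition as scanned. The call that produces the ScanResult
# (`graph_storage_client.scan_vertex(...)` below) is an assumption about the wider
# client API and is shown only to give the loop some context:
#
#   scan_result = graph_storage_client.scan_vertex(space_name='my_space', tag_name='player')
#   while scan_result.has_next():
#       vertex_result = scan_result.next()
#       if vertex_result is None:
#           continue
#       for node in vertex_result.as_nodes():
#           print(node)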
| 33.561538
| 86
| 0.523264
|
3d2a3c95e7b595926c60022fedec674755fde7bf
| 177
|
py
|
Python
|
BootCRUDApp/urls.py
|
cs-fullstack-2019-spring/django-bootstrapcrud-cw-gkg901
|
c9543d77cbd2d28248fbf1f0577c0075330db8d7
|
[
"Apache-2.0"
] | null | null | null |
BootCRUDApp/urls.py
|
cs-fullstack-2019-spring/django-bootstrapcrud-cw-gkg901
|
c9543d77cbd2d28248fbf1f0577c0075330db8d7
|
[
"Apache-2.0"
] | null | null | null |
BootCRUDApp/urls.py
|
cs-fullstack-2019-spring/django-bootstrapcrud-cw-gkg901
|
c9543d77cbd2d28248fbf1f0577c0075330db8d7
|
[
"Apache-2.0"
] | null | null | null |
from django.urls import path
from . import views
urlpatterns = [
path('', views.index, name='index'),
path('editItem/<int:itemID>', views.editItem, name='editItem'),
]
| 22.125
| 67
| 0.672316
|
ee7b839672690424818093c14803c5cad1d05102
| 173
|
py
|
Python
|
diofant/printing/pretty/__init__.py
|
rajkk1/diofant
|
6b361334569e4ec2e8c7d30dc324387a4ad417c2
|
[
"BSD-3-Clause"
] | 57
|
2016-09-13T23:16:26.000Z
|
2022-03-29T06:45:51.000Z
|
diofant/printing/pretty/__init__.py
|
rajkk1/diofant
|
6b361334569e4ec2e8c7d30dc324387a4ad417c2
|
[
"BSD-3-Clause"
] | 402
|
2016-05-11T11:11:47.000Z
|
2022-03-31T14:27:02.000Z
|
diofant/printing/pretty/__init__.py
|
rajkk1/diofant
|
6b361334569e4ec2e8c7d30dc324387a4ad417c2
|
[
"BSD-3-Clause"
] | 20
|
2016-05-11T08:17:37.000Z
|
2021-09-10T09:15:51.000Z
|
"""ASCII-ART 2D pretty-printer"""
from .pretty import pprint, pprint_use_unicode, pretty, pretty_print
__all__ = 'pprint', 'pprint_use_unicode', 'pretty', 'pretty_print'
| 24.714286
| 68
| 0.751445
|
6892d7e9d24a4763e7d306a9a5011606ec2bcaaa
| 1,605
|
py
|
Python
|
examples/undocumented/python/kernel_director_linear.py
|
cloner1984/shogun
|
901c04b2c6550918acf0594ef8afeb5dcd840a7d
|
[
"BSD-3-Clause"
] | 2
|
2021-08-12T18:11:06.000Z
|
2021-11-17T10:56:49.000Z
|
examples/undocumented/python/kernel_director_linear.py
|
cloner1984/shogun
|
901c04b2c6550918acf0594ef8afeb5dcd840a7d
|
[
"BSD-3-Clause"
] | null | null | null |
examples/undocumented/python/kernel_director_linear.py
|
cloner1984/shogun
|
901c04b2c6550918acf0594ef8afeb5dcd840a7d
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
import numpy
from shogun import RealFeatures, MSG_DEBUG
traindat = numpy.random.random_sample((10,10))
testdat = numpy.random.random_sample((10,10))
parameter_list=[[traindat,testdat,1.2],[traindat,testdat,1.4]]
def kernel_director_linear (fm_train_real=traindat,fm_test_real=testdat,scale=1.2):
try:
from shogun import DirectorKernel
except ImportError:
print("recompile shogun with --enable-swig-directors")
return
class DirectorLinearKernel(DirectorKernel):
def __init__(self):
DirectorKernel.__init__(self, True)
def kernel_function(self, idx_a, idx_b):
seq1 = self.get_lhs().get_feature_vector(idx_a)
seq2 = self.get_rhs().get_feature_vector(idx_b)
return numpy.dot(seq1, seq2)
from shogun import LinearKernel, AvgDiagKernelNormalizer
from shogun import Time
feats_train=RealFeatures(fm_train_real)
#feats_train.io.set_loglevel(MSG_DEBUG)
feats_train.parallel.set_num_threads(1)
feats_test=RealFeatures(fm_test_real)
kernel=LinearKernel()
kernel.set_normalizer(AvgDiagKernelNormalizer(scale))
kernel.init(feats_train, feats_train)
dkernel=DirectorLinearKernel()
dkernel.set_normalizer(AvgDiagKernelNormalizer(scale))
dkernel.init(feats_train, feats_train)
#print "km_train"
t=Time()
km_train=kernel.get_kernel_matrix()
#t1=t.cur_time_diff(True)
#print "dkm_train"
t=Time()
dkm_train=dkernel.get_kernel_matrix()
#t2=t.cur_time_diff(True)
#print "km_train", km_train
#print "dkm_train", dkm_train
return km_train, dkm_train
if __name__=='__main__':
print('DirectorLinear')
kernel_director_linear(*parameter_list[0])
| 27.672414
| 83
| 0.788785
|
91e842428712607131765c35addee7ad2c361965
| 7,672
|
py
|
Python
|
katdal/test/test_sensordata.py
|
kernsuite-debian/katdal
|
62c5732f0bf1a2b6fa8d275e7405bcb1648534b0
|
[
"BSD-3-Clause"
] | null | null | null |
katdal/test/test_sensordata.py
|
kernsuite-debian/katdal
|
62c5732f0bf1a2b6fa8d275e7405bcb1648534b0
|
[
"BSD-3-Clause"
] | null | null | null |
katdal/test/test_sensordata.py
|
kernsuite-debian/katdal
|
62c5732f0bf1a2b6fa8d275e7405bcb1648534b0
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8
################################################################################
# Copyright (c) 2018-2019, National Research Foundation (Square Kilometre Array)
#
# Licensed under the BSD 3-Clause License (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy
# of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
"""Tests for :py:mod:`katdal.sensordata`."""
from __future__ import print_function, division, absolute_import
from builtins import object
from collections import OrderedDict
import numpy as np
from nose.tools import assert_equal, assert_in, assert_not_in, assert_raises, assert_is_instance
import mock
from katdal.sensordata import (SensorCache, SensorData, SimpleSensorGetter, to_str,
remove_duplicates_and_invalid_values)
def assert_equal_typed(a, b):
assert_equal(a, b)
assert_equal(type(a), type(b))
class TestToStr(object):
def test_non_str(self):
assert_equal_typed(to_str(3), 3)
assert_equal_typed(to_str(None), None)
def test_simple_str(self):
assert_equal_typed(to_str(b'hello'), 'hello')
assert_equal_typed(to_str(u'hello'), 'hello')
def test_non_ascii(self):
assert_equal_typed(to_str(b'caf\xc3\xa9'), 'café')
assert_equal_typed(to_str(u'café'), 'café')
def test_list(self):
assert_equal_typed(to_str([b'hello', u'world']), ['hello', 'world'])
def test_tuple(self):
assert_equal_typed(to_str((b'hello', u'world')), ('hello', 'world'))
def test_dict(self):
assert_equal_typed(to_str({b'hello': b'world', u'abc': u'xyz'}),
{'hello': 'world', 'abc': 'xyz'})
def test_custom_dict(self):
assert_equal_typed(to_str(OrderedDict([(b'hello', b'world'), (u'abc', u'xyz')])),
OrderedDict([('hello', 'world'), ('abc', 'xyz')]))
def test_numpy_str(self):
a = np.array([[b'abc', b'def'], [b'ghi', b'jk']])
b = np.array([[u'abc', u'def'], [u'ghi', u'jk']])
c = np.array([['abc', 'def'], ['ghi', 'jk']])
np.testing.assert_array_equal(to_str(a), c)
np.testing.assert_array_equal(to_str(b), c)
def test_numpy_object(self):
a = np.array([b'abc', u'def', (b'xyz', u'uvw')], dtype='O')
b = np.array(['abc', 'def', ('xyz', 'uvw')], dtype='O')
np.testing.assert_array_equal(to_str(a), b)
class TestSensorCache(object):
def _cache_data(self):
sensors = [
('foo', [4.0, 7.0], [3.0, 6.0]),
('cat', [2.0, 6.0], ['hello', 'world'])
]
cache_data = {}
for name, ts, values in sensors:
sd = SimpleSensorGetter(name, np.asarray(ts), np.asarray(values))
cache_data[name] = sd
return cache_data
def setup(self):
self.cache = SensorCache(self._cache_data(), timestamps=np.arange(10.), dump_period=1.0)
def test_extract_float(self):
data = self.cache.get('foo', extract=True)
np.testing.assert_array_equal(data, [3.0, 3.0, 3.0, 3.0, 3.0, 4.0, 5.0, 6.0, 6.0, 6.0])
def test_extract_categorical(self):
data = self.cache.get('cat', extract=True)
H = 'hello'
W = 'world'
np.testing.assert_array_equal(data[:], [H, H, H, H, H, H, W, W, W, W])
def test_alias(self):
self.cache = SensorCache(
self._cache_data(), timestamps=np.arange(10.), dump_period=1.0,
aliases={'zz': 'at'})
# Check that adding the alias didn't lead to extraction
assert_is_instance(self.cache.get('czz', extract=False), SimpleSensorGetter)
np.testing.assert_array_equal(self.cache['czz'], self.cache['cat'])
def test_len(self):
assert_equal(len(self.cache), 2)
def test_keys(self):
assert_equal(sorted(self.cache.keys()), ['cat', 'foo'])
def test_contains(self):
assert_in('cat', self.cache)
assert_in('foo', self.cache)
assert_not_in('dog', self.cache)
template = 'Antennas/{ant}/{param1}_{param2}'
self.cache.virtual[template] = lambda x: None
assert_not_in(template, self.cache)
def test_setitem_delitem(self):
self.cache['bar'] = SimpleSensorGetter('bar', np.array([1.0]), np.array([0.0]))
np.testing.assert_array_equal(self.cache['bar'], np.zeros(10))
del self.cache['bar']
assert_not_in('bar', self.cache)
def test_sensor_time_offset(self):
data = self.cache.get('foo', extract=True, time_offset=-1.0)
np.testing.assert_array_equal(data, [3.0, 3.0, 3.0, 3.0, 4.0, 5.0, 6.0, 6.0, 6.0, 6.0])
def test_virtual_sensors(self):
calculate_value = mock.Mock()
def _check_sensor(cache, name, **kwargs):
"""Check that virtual sensor function gets the expected parameters."""
assert_equal(kwargs, params)
calculate_value()
value = kwargs['param2']
cache[name] = value
return value
# Set up a virtual sensor and trigger it to get a value
params = {'ant': 'm000', 'param1': 'one', 'param2': 'two'}
template = 'Antennas/{ant}/{param1}_{param2}'
self.cache.virtual[template] = _check_sensor
value = self.cache.get(template.format(**params))
assert_equal(value, params['param2'])
assert_equal(calculate_value.call_count, 1)
# Check that the value was taken from the cache the second time around
value = self.cache.get(template.format(**params))
assert_equal(value, params['param2'])
assert_equal(calculate_value.call_count, 1)
        # If your parameter values contain underscores, don't use underscore as the delimiter
params = {'ant': 'm000', 'param1': 'one', 'param2': 'two_three'}
with assert_raises(AssertionError):
self.cache.get(template.format(**params))
template = 'Antennas/{ant}/{param1}/{param2}'
# The updated template has not yet been added to the cache
with assert_raises(KeyError):
self.cache.get(template.format(**params))
self.cache.virtual[template] = _check_sensor
value = self.cache.get(template.format(**params))
assert_equal(value, params['param2'])
assert_equal(calculate_value.call_count, 2)
# TODO: more tests required:
# - extract=False
# - selection
def test_sensor_cleanup():
# The first sensor event has a status of "unknown" and is therefore invalid. It happened
# after the second (valid) event, though, and snuck through due to a bug (now fixed).
# This mirrors the behaviour of the cbf_1_wide_input_labelling sensor in CBID 1588667937.
timestamp = np.array([1.0, 0.0, 3.0, 3.0, 3.0, 3.0, 2.0])
value = np.array(['broke', 'a', 'c', 'c', 'c', 'd', 'b'])
status = np.array(['unknown', 'nominal', 'nominal', 'nominal', 'warn', 'error', 'nominal'])
dirty = SensorData('test', timestamp, value, status)
clean = remove_duplicates_and_invalid_values(dirty)
assert_equal(clean.status, None)
np.testing.assert_array_equal(clean.value, np.array(['a', 'b', 'd']))
np.testing.assert_array_equal(clean.timestamp, np.array([0.0, 2.0, 3.0]))
| 41.026738
| 96
| 0.616528
|
182caa3a57e96ac36a4e5e356d79c06f739bcede
| 4,100
|
py
|
Python
|
experiments/toydatagen.py
|
mossjacob/reggae
|
673b483731e6f162f41c3b52dc30f6a0df4684bf
|
[
"MIT"
] | 1
|
2021-02-13T11:00:29.000Z
|
2021-02-13T11:00:29.000Z
|
experiments/toydatagen.py
|
mossjacob/reggae
|
673b483731e6f162f41c3b52dc30f6a0df4684bf
|
[
"MIT"
] | null | null | null |
experiments/toydatagen.py
|
mossjacob/reggae
|
673b483731e6f162f41c3b52dc30f6a0df4684bf
|
[
"MIT"
] | null | null | null |
import torch
import numpy as np
import pandas as pd
from pathlib import Path
from alfi.datasets import HomogeneousReactionDiffusion, ReactionDiffusionGenerator
def save_dataset(toydata):
"""
data_dir: the directory where the toy data and intermediate data lies. Will also be saved here.
"""
data_dir = '../data'
temp = pd.read_csv(Path(data_dir) / 'demToy1GPmRNA.csv').values
t_sorted = np.argsort(temp[:, 0], kind='mergesort')
# toydata = torch.load(Path(data_dir) / 'intermediate_toydata.pt')
params_list = list()
orig_data = list()
num_samples = toydata[0]['samples'].shape[0]
x_observed = torch.tensor(temp[t_sorted, 0:2]).permute(1, 0)
for i in range(len(toydata)):
params = torch.tensor([toydata[i][key] for key in ['l1', 'l2', 'sensitivity', 'decay', 'diffusion']])
samples = toydata[i]['samples']
for sample in range(num_samples):
lf = samples[sample, 1681:]
out = samples[sample, :1681]
lf_out = torch.stack([lf, out], dim=0)
orig_data.append(lf_out)
params_list.append(params)
params = torch.stack(params_list)
orig_data = torch.stack(orig_data)
shuffle = torch.randperm(orig_data.size()[0])
orig_data = orig_data[shuffle]
params = params[shuffle]
torch.save({'x_observed': x_observed, 'orig_data': orig_data, 'params': params}, Path(data_dir) / 'toydata.pt')
if __name__ == '__main__':
dataset = HomogeneousReactionDiffusion(data_dir='./data/', nn_format=False)
tx = torch.tensor(dataset.orig_data[0]).t()
with torch.no_grad():
tot = 4 * 4 * 5 * 5
i = 0
objects = list()
for sensitivity in np.linspace(0.2, 0.9, 6):
for l1 in np.linspace(0.1, 0.4, 5):
for l2 in np.linspace(0.1, 0.4, 5):
for diffusion in np.linspace(0.001, 0.1, 6):
for decay in np.linspace(0.01, 0.4, 5):
kernel = ReactionDiffusionGenerator(
lengthscale=[l1, l2],
decay=decay,
diffusion=diffusion,
sensitivity=sensitivity
)
Kuu, Kyy, Kyu, Kuy = kernel.joint(tx, tx)
kern = torch.zeros((2 * 1681, 2 * 1681))
kern[:1681, :1681] = Kyy
kern[:1681, 1681:] = Kyu
kern[1681:, :1681] = Kuy
kern[1681:, 1681:] = Kuu
try:
eigval, eigvec = torch.symeig(kern, eigenvectors=True)
except:
print('Failed for', l1, l2, sensitivity, decay, diffusion)
continue
eps = -1e-5
num = torch.sum((~(eigval >= eps)).type(torch.int)).item()
if num > 30:
print('Failed for', l1, l2, sensitivity, decay, diffusion)
continue
eigval_root = eigval.clamp_min(0.0).sqrt()
corr_matrix = (eigvec * eigval_root).transpose(-1, -2)
a = torch.randn(torch.Size([50, 2 * 1681]))
samples = a @ corr_matrix
obj = {
'samples': samples.clone(),
'sensitivity': sensitivity,
'l1': l1,
'l2': l2,
'diffusion': diffusion,
'decay': decay
}
objects.append(obj)
i += 1
print('Done ', i, '/', tot)
# torch.save(, 'intermediate_toydata.pt')
save_dataset(objects)
print('Saved')
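# Note on the sampling step above (added for clarity; not part of the original
# script): for a positive semi-definite kernel matrix K with eigendecomposition
# K = V diag(w) V^T, drawing a ~ N(0, I) and forming
#
#   samples = a @ (V * sqrt(w)).T
#
# yields rows whose covariance is (V sqrt(diag(w))) (V sqrt(diag(w)))^T = V diag(w) V^T = K,
# i.e. draws from the zero-mean Gaussian process defined by the reaction-diffusion
# kernel. Clamping negative eigenvalues to zero (eigval.clamp_min(0.0)) guards
# against small numerical violations of positive semi-definiteness.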
| 44.565217
| 115
| 0.468293
|
0db3ae1d4f6a7bc6ec87b06f82fbfc86f5b46f26
| 8,323
|
py
|
Python
|
core/controllers/blog_admin_test.py
|
WebFlakyTest/oppia
|
520e35490eae8171beb035fbafc2948983abec75
|
[
"Apache-2.0"
] | 1
|
2021-08-17T20:33:12.000Z
|
2021-08-17T20:33:12.000Z
|
core/controllers/blog_admin_test.py
|
WebFlakyTest/oppia
|
520e35490eae8171beb035fbafc2948983abec75
|
[
"Apache-2.0"
] | null | null | null |
core/controllers/blog_admin_test.py
|
WebFlakyTest/oppia
|
520e35490eae8171beb035fbafc2948983abec75
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the blog admin page."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
import logging
from core.domain import config_domain
from core.domain import config_services
from core.tests import test_utils
import feconf
class BlogAdminPageTests(test_utils.GenericTestBase):
"""Checks the access to the blog admin page and its rendering."""
def test_blog_admin_page_access_without_logging_in(self):
"""Tests access to the Blog Admin page."""
self.get_html_response('/blog-admin', expected_status_int=302)
def test_blog_admin_page_acess_without_being_blog_admin(self):
self.signup(self.VIEWER_EMAIL, self.VIEWER_USERNAME)
self.login(self.VIEWER_EMAIL)
self.get_html_response('/blog-admin', expected_status_int=401)
self.logout()
def test_blog_admin_page_acess_as_blog_admin(self):
self.signup(self.BLOG_ADMIN_EMAIL, self.BLOG_ADMIN_USERNAME)
self.set_user_role(
self.BLOG_ADMIN_USERNAME, feconf.ROLE_ID_BLOG_ADMIN)
self.login(self.BLOG_ADMIN_EMAIL)
self.get_html_response('/blog-admin')
self.logout()
class BlogAdminRolesHandlerTest(test_utils.GenericTestBase):
"""Checks the user role handling on the blog admin page."""
def setUp(self):
"""Complete the signup process for self.ADMIN_EMAIL."""
super(BlogAdminRolesHandlerTest, self).setUp()
self.signup(
self.BLOG_ADMIN_EMAIL, self.BLOG_ADMIN_USERNAME)
self.set_user_role(
self.BLOG_ADMIN_USERNAME,
feconf.ROLE_ID_BLOG_ADMIN)
def test_updating_and_removing_blog_editor_role_successfully(self):
user_email = 'user1@example.com'
username = 'user1'
self.signup(user_email, username)
self.login(self.BLOG_ADMIN_EMAIL)
# Check role correctly gets updated.
csrf_token = self.get_new_csrf_token()
response_dict = self.post_json(
feconf.BLOG_ADMIN_ROLE_HANDLER_URL,
{
'role': feconf.ROLE_ID_BLOG_ADMIN,
'username': username
},
csrf_token=csrf_token,
expected_status_int=200)
self.assertEqual(response_dict, {})
# Check removing user from blog editor role.
csrf_token = self.get_new_csrf_token()
response_dict = self.put_json(
feconf.BLOG_ADMIN_ROLE_HANDLER_URL,
{'username': username},
csrf_token=csrf_token,
expected_status_int=200)
self.assertEqual(response_dict, {})
def test_updating_blog_editor_role_for_invalid_user(self):
username = 'invaliduser'
self.login(self.BLOG_ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
feconf.BLOG_ADMIN_ROLE_HANDLER_URL,
{
'role': feconf.ROLE_ID_BLOG_ADMIN,
'username': username
},
csrf_token=csrf_token,
expected_status_int=400)
def test_removing_blog_editor_role_for_invalid_user(self):
username = 'invaliduser'
self.login(self.BLOG_ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
self.put_json(
feconf.BLOG_ADMIN_ROLE_HANDLER_URL,
{'username': username},
csrf_token=csrf_token,
expected_status_int=400)
csrf_token = self.get_new_csrf_token()
self.put_json(
feconf.BLOG_ADMIN_ROLE_HANDLER_URL,
{},
csrf_token=csrf_token,
expected_status_int=400)
class BlogAdminHandlerTest(test_utils.GenericTestBase):
"""Checks the user role handling on the blog admin page."""
def setUp(self):
"""Complete the signup process for self.ADMIN_EMAIL."""
super(BlogAdminHandlerTest, self).setUp()
self.signup(
self.BLOG_ADMIN_EMAIL, self.BLOG_ADMIN_USERNAME)
self.set_user_role(
self.BLOG_ADMIN_USERNAME,
feconf.ROLE_ID_BLOG_ADMIN)
self.blog_admin_id = self.get_user_id_from_email(self.BLOG_ADMIN_EMAIL)
def test_update_configuration_property(self):
"""Test that configuration properties can be updated."""
self.login(self.BLOG_ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
new_config_value = 20
response_dict = self.get_json('/blogadminhandler')
response_config_properties = response_dict['config_properties']
self.assertDictContainsSubset({
'value': 10,
}, response_config_properties[
config_domain.MAX_NUMBER_OF_TAGS_ASSIGNED_TO_BLOG_POST.name])
payload = {
'action': 'save_config_properties',
'new_config_property_values': {
config_domain.MAX_NUMBER_OF_TAGS_ASSIGNED_TO_BLOG_POST.name: (
new_config_value),
}
}
self.post_json('/blogadminhandler', payload, csrf_token=csrf_token)
response_dict = self.get_json('/blogadminhandler')
response_config_properties = response_dict['config_properties']
self.assertDictContainsSubset({
'value': new_config_value,
}, response_config_properties[
config_domain.MAX_NUMBER_OF_TAGS_ASSIGNED_TO_BLOG_POST.name])
self.logout()
def test_revert_config_property(self):
observed_log_messages = []
def _mock_logging_function(msg, *args):
"""Mocks logging.info()."""
observed_log_messages.append(msg % args)
self.login(self.BLOG_ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
config_services.set_property(
self.blog_admin_id,
'max_number_of_tags_assigned_to_blog_post',
20)
self.assertEqual(
config_domain.MAX_NUMBER_OF_TAGS_ASSIGNED_TO_BLOG_POST.value, 20)
with self.swap(logging, 'info', _mock_logging_function):
self.post_json(
'/blogadminhandler', {
'action': 'revert_config_property',
'config_property_id':
'max_number_of_tags_assigned_to_blog_post',
}, csrf_token=csrf_token)
self.assertFalse(config_domain.PROMO_BAR_ENABLED.value)
self.assertEqual(
observed_log_messages,
['[BLOG ADMIN] %s reverted config property:'
' max_number_of_tags_assigned_to_blog_post'
% self.blog_admin_id])
self.logout()
def test_invalid_values_for_updating_config_properties(self):
self.login(self.BLOG_ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
new_config_value = [20]
response_dict = self.get_json('/blogadminhandler')
response_config_properties = response_dict['config_properties']
self.assertDictContainsSubset({
'value': 10,
}, response_config_properties[
config_domain.MAX_NUMBER_OF_TAGS_ASSIGNED_TO_BLOG_POST.name])
payload = {
'action': 'save_config_properties',
'new_config_property_values': {
config_domain.MAX_NUMBER_OF_TAGS_ASSIGNED_TO_BLOG_POST.name: (
new_config_value),
}
}
response_dict = self.post_json(
'/blogadminhandler', payload, csrf_token=csrf_token,
expected_status_int=400)
self.assertEqual(
response_dict['error'], 'Schema validation for \'new_config_'
'property_values\' failed: Could not convert list to int: [20]')
| 35.875
| 79
| 0.66106
|
ebb0d5947008666bb6e1fc068a0af0ca00273b6f
| 23
|
py
|
Python
|
fastai/__init__.py
|
rajesh-ibm-power/fastai
|
2c0bd5694226604d15b82a4f7ae2be69bc846130
|
[
"Apache-2.0"
] | null | null | null |
fastai/__init__.py
|
rajesh-ibm-power/fastai
|
2c0bd5694226604d15b82a4f7ae2be69bc846130
|
[
"Apache-2.0"
] | null | null | null |
fastai/__init__.py
|
rajesh-ibm-power/fastai
|
2c0bd5694226604d15b82a4f7ae2be69bc846130
|
[
"Apache-2.0"
] | null | null | null |
__version__ = "2.2.4"
| 7.666667
| 21
| 0.608696
|
b3efd41d02b0da58acb299b4cc63bb8396ac00ec
| 4,231
|
py
|
Python
|
Interviewbook/views.py
|
kmAyush/IntReview
|
b96d5249d8b0c7987f533fe345fef470b9ea6435
|
[
"MIT"
] | 2
|
2020-01-20T08:55:42.000Z
|
2020-08-01T16:44:57.000Z
|
Interviewbook/views.py
|
kmAyush/IntReview
|
b96d5249d8b0c7987f533fe345fef470b9ea6435
|
[
"MIT"
] | 28
|
2019-01-29T18:35:23.000Z
|
2022-02-10T08:34:28.000Z
|
Interviewbook/views.py
|
kmAyush/IntReview
|
b96d5249d8b0c7987f533fe345fef470b9ea6435
|
[
"MIT"
] | 15
|
2019-01-31T13:47:51.000Z
|
2020-09-25T13:35:14.000Z
|
from django.http import HttpResponse, HttpResponseRedirect
from .forms import *
from django.shortcuts import redirect,render,get_object_or_404,reverse
from django.urls import reverse
from django.utils import timezone
from .models import *
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.contrib.auth import login, authenticate,logout
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
def index(request):
return render(request, 'Interviewbook/index.html')
def signup(request):
if request.method == 'POST':
form = UserCreationForm(request.POST)
if form.is_valid():
form.save()
username = form.cleaned_data.get('username')
raw_password = form.cleaned_data.get('password1')
user = authenticate(username=username, password=raw_password)
login(request, user)
return redirect('index')
else:
form = UserCreationForm()
return render(request, 'Interviewbook/signup.html', {'form': form})
def login_view(request):
_message = False
if request.method == 'POST':
_username = request.POST['username']
_password = request.POST['password']
user = authenticate(username=_username, password=_password)
if user is not None:
if user.is_active:
login(request,user)
return redirect('index')
else:
_message = 'Your account is not activated'
else:
_message = 'Invalid login, please try again.'
context = {'message': _message}
return render(request, 'Interviewbook/login.html', context)
def logout_view(request):
logout(request)
return redirect('index')
def ListResponses(request):
responses = InterviewResponse.objects.filter(timestamp__lte=timezone.now()).order_by('-timestamp')
paginator = Paginator(responses, 10) # Show 10 responses per page
page = request.GET.get('page')
responses = paginator.get_page(page)
return render(request, 'Interviewbook/responses.html', {'responses': responses})
def ListResponsesbyCompany(request):
query=request.GET['company']
company= get_object_or_404(Company, name=query)
responses = InterviewResponse.objects.filter(company= company.id).order_by('-hits')
paginator = Paginator(responses, 10) # Show 10 responses per page
page = request.GET.get('page')
responses = paginator.get_page(page)
return render(request, 'Interviewbook/responses.html', {'responses': responses})
def viewResponse(request, response_id):
response = get_object_or_404(InterviewResponse, id=response_id)
response.increase()
return render(request, 'Interviewbook/response.html', {'response': response})
@login_required(login_url='login')
def updateResponse(request, response_id):
instance = get_object_or_404(InterviewResponse, id=response_id)
form = ResponseForm(request.POST or None, instance=instance)
if form.is_valid() and instance.name.pk==request.user.pk:
form.save()
return redirect('index')
return render(request, 'Interviewbook/Responseform.html', {'form': form})
@login_required(login_url='login')
def deleteResponse(request,response_id):
instance = get_object_or_404(InterviewResponse, id=response_id)
if instance.name.pk==request.user.pk:
instance.delete()
return redirect('ListResponses')
def response_new(request):
if request.method == "POST":
form = ResponseForm(request.POST)
if form.is_valid() and request.user.is_authenticated:
form.save(user_id=request.user.pk)
return redirect('index')
else:
form = ResponseForm()
return render(request, 'Interviewbook/Responseform.html', {'form': form})
@login_required(login_url='login')
def add_company(request):
if request.method == "POST":
form = CompanyForm(request.POST)
if form.is_valid():
form.save()
return redirect('response_new')
else:
form = CompanyForm()
return render(request, 'Interviewbook/CompanyForm.html', {'form': form})
| 38.463636
| 102
| 0.695817
|
fc2d78978ffa6f7afa4ac7e68776355204768014
| 70
|
py
|
Python
|
data/studio21_generated/interview/1873/starter_code.py
|
vijaykumawat256/Prompt-Summarization
|
614f5911e2acd2933440d909de2b4f86653dc214
|
[
"Apache-2.0"
] | null | null | null |
data/studio21_generated/interview/1873/starter_code.py
|
vijaykumawat256/Prompt-Summarization
|
614f5911e2acd2933440d909de2b4f86653dc214
|
[
"Apache-2.0"
] | null | null | null |
data/studio21_generated/interview/1873/starter_code.py
|
vijaykumawat256/Prompt-Summarization
|
614f5911e2acd2933440d909de2b4f86653dc214
|
[
"Apache-2.0"
] | null | null | null |
class Solution:
    def preimageSizeFZF(self, K: int) -> int:
        pass  # starter stub: the implementation is intentionally left to the solver
| 23.333333
| 45
| 0.6
|
ded944301d08fa2a1292bbd0ba0a7c48ad6dfe2b
| 86,798
|
py
|
Python
|
kubernetes_asyncio/client/api/certificates_v1beta1_api.py
|
aK0nshin/kubernetes_asyncio
|
aef9edcc1f8671a5b1bba9f4684bde890176b19c
|
[
"Apache-2.0"
] | null | null | null |
kubernetes_asyncio/client/api/certificates_v1beta1_api.py
|
aK0nshin/kubernetes_asyncio
|
aef9edcc1f8671a5b1bba9f4684bde890176b19c
|
[
"Apache-2.0"
] | null | null | null |
kubernetes_asyncio/client/api/certificates_v1beta1_api.py
|
aK0nshin/kubernetes_asyncio
|
aef9edcc1f8671a5b1bba9f4684bde890176b19c
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: v1.14.7
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from kubernetes_asyncio.client.api_client import ApiClient
class CertificatesV1beta1Api(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_certificate_signing_request(self, body, **kwargs): # noqa: E501
"""create_certificate_signing_request # noqa: E501
create a CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_certificate_signing_request(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1beta1CertificateSigningRequest body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.
:return: V1beta1CertificateSigningRequest
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_certificate_signing_request_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.create_certificate_signing_request_with_http_info(body, **kwargs) # noqa: E501
return data
def create_certificate_signing_request_with_http_info(self, body, **kwargs): # noqa: E501
"""create_certificate_signing_request # noqa: E501
create a CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_certificate_signing_request_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1beta1CertificateSigningRequest body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.
:return: V1beta1CertificateSigningRequest
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['body', 'pretty', 'dry_run', 'field_manager'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_certificate_signing_request" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in local_var_params or
local_var_params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_certificate_signing_request`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in local_var_params:
query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501
if 'dry_run' in local_var_params:
query_params.append(('dryRun', local_var_params['dry_run'])) # noqa: E501
if 'field_manager' in local_var_params:
query_params.append(('fieldManager', local_var_params['field_manager'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/certificates.k8s.io/v1beta1/certificatesigningrequests', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1CertificateSigningRequest', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_certificate_signing_request(self, name, **kwargs): # noqa: E501
"""delete_certificate_signing_request # noqa: E501
delete a CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_certificate_signing_request(name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the CertificateSigningRequest (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param int grace_period_seconds: The duration in seconds before the object should be deleted. The value must be a non-negative integer; zero means delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
:param bool orphan_dependents: Deprecated: please use PropagationPolicy instead; this field will be deprecated in 1.7. Determines whether the dependent objects should be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:param V1DeleteOptions body:
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_certificate_signing_request_with_http_info(name, **kwargs) # noqa: E501
else:
(data) = self.delete_certificate_signing_request_with_http_info(name, **kwargs) # noqa: E501
return data
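    # Usage sketch (assumption: kubernetes.client packaging, same `api` object as the
    # create sketch above): deleting a named CSR with an explicit grace period and
    # propagation policy.
    #
    #   status = api.delete_certificate_signing_request(
    #       "example-csr",
    #       body=client.V1DeleteOptions(grace_period_seconds=0,
    #                                   propagation_policy="Background"))
    #   print(status.status)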
def delete_certificate_signing_request_with_http_info(self, name, **kwargs): # noqa: E501
"""delete_certificate_signing_request # noqa: E501
delete a CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_certificate_signing_request_with_http_info(name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the CertificateSigningRequest (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param int grace_period_seconds: The duration in seconds before the object should be deleted. The value must be a non-negative integer; zero means delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per-object value if not specified.
:param bool orphan_dependents: Deprecated: please use PropagationPolicy instead; this field will be deprecated in 1.7. Determines whether the dependent objects should be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:param V1DeleteOptions body:
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['name', 'pretty', 'dry_run', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_certificate_signing_request" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in local_var_params or
local_var_params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_certificate_signing_request`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
if 'pretty' in local_var_params:
query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501
if 'dry_run' in local_var_params:
query_params.append(('dryRun', local_var_params['dry_run'])) # noqa: E501
if 'grace_period_seconds' in local_var_params:
query_params.append(('gracePeriodSeconds', local_var_params['grace_period_seconds'])) # noqa: E501
if 'orphan_dependents' in local_var_params:
query_params.append(('orphanDependents', local_var_params['orphan_dependents'])) # noqa: E501
if 'propagation_policy' in local_var_params:
query_params.append(('propagationPolicy', local_var_params['propagation_policy'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/certificates.k8s.io/v1beta1/certificatesigningrequests/{name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_collection_certificate_signing_request(self, **kwargs): # noqa: E501
"""delete_collection_certificate_signing_request # noqa: E501
delete collection of CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_collection_certificate_signing_request(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, the result is returned from remote storage based on the quorum-read flag; - if it is 0, the result is simply what is currently in cache, with no freshness guarantee; - if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_collection_certificate_signing_request_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.delete_collection_certificate_signing_request_with_http_info(**kwargs) # noqa: E501
return data
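    # Usage sketch (assumption: kubernetes.client packaging, same `api` object as
    # above): deleting every CSR that matches a label selector in one call.
    #
    #   status = api.delete_collection_certificate_signing_request(
    #       label_selector="team=infra")  # hypothetical label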
def delete_collection_certificate_signing_request_with_http_info(self, **kwargs): # noqa: E501
"""delete_collection_certificate_signing_request # noqa: E501
delete collection of CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_collection_certificate_signing_request_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, the result is returned from remote storage based on the quorum-read flag; - if it is 0, the result is simply what is currently in cache, with no freshness guarantee; - if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_collection_certificate_signing_request" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in local_var_params:
query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501
if '_continue' in local_var_params:
query_params.append(('continue', local_var_params['_continue'])) # noqa: E501
if 'field_selector' in local_var_params:
query_params.append(('fieldSelector', local_var_params['field_selector'])) # noqa: E501
if 'label_selector' in local_var_params:
query_params.append(('labelSelector', local_var_params['label_selector'])) # noqa: E501
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'resource_version' in local_var_params:
query_params.append(('resourceVersion', local_var_params['resource_version'])) # noqa: E501
if 'timeout_seconds' in local_var_params:
query_params.append(('timeoutSeconds', local_var_params['timeout_seconds'])) # noqa: E501
if 'watch' in local_var_params:
query_params.append(('watch', local_var_params['watch'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/certificates.k8s.io/v1beta1/certificatesigningrequests', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_api_resources(self, **kwargs): # noqa: E501
"""get_api_resources # noqa: E501
get available resources # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_api_resources(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: V1APIResourceList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_api_resources_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_api_resources_with_http_info(**kwargs) # noqa: E501
return data
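    # Usage sketch (assumption: kubernetes.client packaging, same `api` object as
    # above): discovering which resources the certificates.k8s.io/v1beta1 group serves.
    #
    #   resource_list = api.get_api_resources()
    #   for resource in resource_list.resources:
    #       print(resource.name, resource.namespaced, resource.verbs)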
def get_api_resources_with_http_info(self, **kwargs): # noqa: E501
"""get_api_resources # noqa: E501
get available resources # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_api_resources_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: V1APIResourceList
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_api_resources" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/certificates.k8s.io/v1beta1/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1APIResourceList', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def list_certificate_signing_request(self, **kwargs): # noqa: E501
"""list_certificate_signing_request # noqa: E501
list or watch objects of kind CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_certificate_signing_request(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, the result is returned from remote storage based on the quorum-read flag; - if it is 0, the result is simply what is currently in cache, with no freshness guarantee; - if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1beta1CertificateSigningRequestList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_certificate_signing_request_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_certificate_signing_request_with_http_info(**kwargs) # noqa: E501
return data
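    # Usage sketch (assumption: kubernetes.client packaging, same `api` object as
    # above): paging through CSRs with `limit`/`continue` as described in the
    # docstring above.
    #
    #   page = api.list_certificate_signing_request(limit=50)
    #   while True:
    #       for csr in page.items:
    #           print(csr.metadata.name)
    #       if not page.metadata._continue:
    #           break
    #       page = api.list_certificate_signing_request(
    #           limit=50, _continue=page.metadata._continue)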
def list_certificate_signing_request_with_http_info(self, **kwargs): # noqa: E501
"""list_certificate_signing_request # noqa: E501
list or watch objects of kind CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_certificate_signing_request_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, the result is returned from remote storage based on the quorum-read flag; - if it is 0, the result is simply what is currently in cache, with no freshness guarantee; - if set to a non-zero value, the result is at least as fresh as the given resourceVersion.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1beta1CertificateSigningRequestList
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_certificate_signing_request" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'pretty' in local_var_params:
query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501
if '_continue' in local_var_params:
query_params.append(('continue', local_var_params['_continue'])) # noqa: E501
if 'field_selector' in local_var_params:
query_params.append(('fieldSelector', local_var_params['field_selector'])) # noqa: E501
if 'label_selector' in local_var_params:
query_params.append(('labelSelector', local_var_params['label_selector'])) # noqa: E501
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'resource_version' in local_var_params:
query_params.append(('resourceVersion', local_var_params['resource_version'])) # noqa: E501
if 'timeout_seconds' in local_var_params:
query_params.append(('timeoutSeconds', local_var_params['timeout_seconds'])) # noqa: E501
if 'watch' in local_var_params:
query_params.append(('watch', local_var_params['watch'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/certificates.k8s.io/v1beta1/certificatesigningrequests', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1CertificateSigningRequestList', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_certificate_signing_request(self, name, body, **kwargs): # noqa: E501
"""patch_certificate_signing_request # noqa: E501
partially update the specified CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_certificate_signing_request(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the CertificateSigningRequest (required)
:param UNKNOWN_BASE_TYPE body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be at most 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch).
:param bool force: Force is going to \"force\" Apply requests. It means the user will re-acquire conflicting fields owned by other people. The force flag must be unset for non-apply patch requests.
:return: V1beta1CertificateSigningRequest
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_certificate_signing_request_with_http_info(name, body, **kwargs) # noqa: E501
else:
(data) = self.patch_certificate_signing_request_with_http_info(name, body, **kwargs) # noqa: E501
return data
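    # Usage sketch (assumption: kubernetes.client packaging, same `api` object as
    # above): a strategic-merge patch that adds a label to an existing CSR. A plain
    # dict is accepted as the patch body.
    #
    #   patched = api.patch_certificate_signing_request(
    #       "example-csr",
    #       {"metadata": {"labels": {"reviewed": "true"}}})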
def patch_certificate_signing_request_with_http_info(self, name, body, **kwargs): # noqa: E501
"""patch_certificate_signing_request # noqa: E501
partially update the specified CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_certificate_signing_request_with_http_info(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the CertificateSigningRequest (required)
:param UNKNOWN_BASE_TYPE body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be at most 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch).
:param bool force: Force is going to \"force\" Apply requests. It means the user will re-acquire conflicting fields owned by other people. The force flag must be unset for non-apply patch requests.
:return: V1beta1CertificateSigningRequest
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['name', 'body', 'pretty', 'dry_run', 'field_manager', 'force'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_certificate_signing_request" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in local_var_params or
local_var_params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_certificate_signing_request`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in local_var_params or
local_var_params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_certificate_signing_request`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
if 'pretty' in local_var_params:
query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501
if 'dry_run' in local_var_params:
query_params.append(('dryRun', local_var_params['dry_run'])) # noqa: E501
if 'field_manager' in local_var_params:
query_params.append(('fieldManager', local_var_params['field_manager'])) # noqa: E501
if 'force' in local_var_params:
query_params.append(('force', local_var_params['force'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/certificates.k8s.io/v1beta1/certificatesigningrequests/{name}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1CertificateSigningRequest', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_certificate_signing_request_status(self, name, body, **kwargs): # noqa: E501
"""patch_certificate_signing_request_status # noqa: E501
partially update status of the specified CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_certificate_signing_request_status(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the CertificateSigningRequest (required)
:param UNKNOWN_BASE_TYPE body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be at most 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch).
:param bool force: Force is going to \"force\" Apply requests. It means the user will re-acquire conflicting fields owned by other people. The force flag must be unset for non-apply patch requests.
:return: V1beta1CertificateSigningRequest
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_certificate_signing_request_status_with_http_info(name, body, **kwargs) # noqa: E501
else:
(data) = self.patch_certificate_signing_request_status_with_http_info(name, body, **kwargs) # noqa: E501
return data
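    # Usage sketch (assumption: kubernetes.client packaging, same `api` object as
    # above): patching only the status subresource, e.g. setting the conditions
    # list. Most callers approve CSRs through the dedicated approval endpoint
    # (see replace_certificate_signing_request_approval) instead.
    #
    #   api.patch_certificate_signing_request_status(
    #       "example-csr",
    #       {"status": {"conditions": [{"type": "Approved",
    #                                   "reason": "AutoApproved",
    #                                   "message": "approved by batch job"}]}})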
def patch_certificate_signing_request_status_with_http_info(self, name, body, **kwargs): # noqa: E501
"""patch_certificate_signing_request_status # noqa: E501
partially update status of the specified CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_certificate_signing_request_status_with_http_info(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the CertificateSigningRequest (required)
:param UNKNOWN_BASE_TYPE body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be at most 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch).
:param bool force: Force is going to \"force\" Apply requests. It means the user will re-acquire conflicting fields owned by other people. The force flag must be unset for non-apply patch requests.
:return: V1beta1CertificateSigningRequest
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['name', 'body', 'pretty', 'dry_run', 'field_manager', 'force'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_certificate_signing_request_status" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in local_var_params or
local_var_params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_certificate_signing_request_status`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in local_var_params or
local_var_params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_certificate_signing_request_status`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
if 'pretty' in local_var_params:
query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501
if 'dry_run' in local_var_params:
query_params.append(('dryRun', local_var_params['dry_run'])) # noqa: E501
if 'field_manager' in local_var_params:
query_params.append(('fieldManager', local_var_params['field_manager'])) # noqa: E501
if 'force' in local_var_params:
query_params.append(('force', local_var_params['force'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/certificates.k8s.io/v1beta1/certificatesigningrequests/{name}/status', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1CertificateSigningRequest', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def read_certificate_signing_request(self, name, **kwargs): # noqa: E501
"""read_certificate_signing_request # noqa: E501
read the specified CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_certificate_signing_request(name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the CertificateSigningRequest (required)
:param str pretty: If 'true', then the output is pretty printed.
:param bool exact: Whether the export should be exact. An exact export maintains cluster-specific fields like 'Namespace'. Deprecated. Planned for removal in 1.18.
:param bool export: Whether this value should be exported. Export strips fields that a user cannot specify. Deprecated. Planned for removal in 1.18.
:return: V1beta1CertificateSigningRequest
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.read_certificate_signing_request_with_http_info(name, **kwargs) # noqa: E501
else:
(data) = self.read_certificate_signing_request_with_http_info(name, **kwargs) # noqa: E501
return data
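    # Usage sketch (assumption: kubernetes.client packaging, same `api` object as
    # above): fetching a single CSR and checking whether a certificate has been issued.
    #
    #   csr = api.read_certificate_signing_request("example-csr")
    #   if csr.status and csr.status.certificate:
    #       print("issued certificate is available (base64-encoded PEM)")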
def read_certificate_signing_request_with_http_info(self, name, **kwargs): # noqa: E501
"""read_certificate_signing_request # noqa: E501
read the specified CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_certificate_signing_request_with_http_info(name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the CertificateSigningRequest (required)
:param str pretty: If 'true', then the output is pretty printed.
:param bool exact: Whether the export should be exact. An exact export maintains cluster-specific fields like 'Namespace'. Deprecated. Planned for removal in 1.18.
:param bool export: Whether this value should be exported. Export strips fields that a user cannot specify. Deprecated. Planned for removal in 1.18.
:return: V1beta1CertificateSigningRequest
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['name', 'pretty', 'exact', 'export'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_certificate_signing_request" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in local_var_params or
local_var_params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_certificate_signing_request`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
if 'pretty' in local_var_params:
query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501
if 'exact' in local_var_params:
query_params.append(('exact', local_var_params['exact'])) # noqa: E501
if 'export' in local_var_params:
query_params.append(('export', local_var_params['export'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/certificates.k8s.io/v1beta1/certificatesigningrequests/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1CertificateSigningRequest', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def read_certificate_signing_request_status(self, name, **kwargs): # noqa: E501
"""read_certificate_signing_request_status # noqa: E501
read status of the specified CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_certificate_signing_request_status(name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the CertificateSigningRequest (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1CertificateSigningRequest
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.read_certificate_signing_request_status_with_http_info(name, **kwargs) # noqa: E501
else:
(data) = self.read_certificate_signing_request_status_with_http_info(name, **kwargs) # noqa: E501
return data
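    # Usage sketch (assumption: kubernetes.client packaging, same `api` object as
    # above): reading only the status subresource to inspect approval conditions.
    #
    #   status_view = api.read_certificate_signing_request_status("example-csr")
    #   for cond in (status_view.status.conditions or []):
    #       print(cond.type, cond.reason)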
def read_certificate_signing_request_status_with_http_info(self, name, **kwargs): # noqa: E501
"""read_certificate_signing_request_status # noqa: E501
read status of the specified CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_certificate_signing_request_status_with_http_info(name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the CertificateSigningRequest (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1CertificateSigningRequest
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['name', 'pretty'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method read_certificate_signing_request_status" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in local_var_params or
local_var_params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `read_certificate_signing_request_status`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
if 'pretty' in local_var_params:
query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/certificates.k8s.io/v1beta1/certificatesigningrequests/{name}/status', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1CertificateSigningRequest', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def replace_certificate_signing_request(self, name, body, **kwargs): # noqa: E501
"""replace_certificate_signing_request # noqa: E501
replace the specified CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_certificate_signing_request(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the CertificateSigningRequest (required)
:param V1beta1CertificateSigningRequest body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be at most 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.
:return: V1beta1CertificateSigningRequest
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.replace_certificate_signing_request_with_http_info(name, body, **kwargs) # noqa: E501
else:
(data) = self.replace_certificate_signing_request_with_http_info(name, body, **kwargs) # noqa: E501
return data
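    # Usage sketch (assumption: kubernetes.client packaging, same `api` object as
    # above): a read-modify-write replace. The object must carry the resourceVersion
    # from the read, otherwise the server rejects the update with a conflict.
    #
    #   current = api.read_certificate_signing_request("example-csr")
    #   current.metadata.labels = {"rotated": "true"}
    #   api.replace_certificate_signing_request("example-csr", current)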
def replace_certificate_signing_request_with_http_info(self, name, body, **kwargs): # noqa: E501
"""replace_certificate_signing_request # noqa: E501
replace the specified CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_certificate_signing_request_with_http_info(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the CertificateSigningRequest (required)
:param V1beta1CertificateSigningRequest body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be at most 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.
:return: V1beta1CertificateSigningRequest
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['name', 'body', 'pretty', 'dry_run', 'field_manager'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_certificate_signing_request" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in local_var_params or
local_var_params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `replace_certificate_signing_request`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in local_var_params or
local_var_params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `replace_certificate_signing_request`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
if 'pretty' in local_var_params:
query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501
if 'dry_run' in local_var_params:
query_params.append(('dryRun', local_var_params['dry_run'])) # noqa: E501
if 'field_manager' in local_var_params:
query_params.append(('fieldManager', local_var_params['field_manager'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/certificates.k8s.io/v1beta1/certificatesigningrequests/{name}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1CertificateSigningRequest', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def replace_certificate_signing_request_approval(self, name, body, **kwargs): # noqa: E501
"""replace_certificate_signing_request_approval # noqa: E501
replace approval of the specified CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_certificate_signing_request_approval(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the CertificateSigningRequest (required)
:param V1beta1CertificateSigningRequest body: (required)
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or equal to 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1CertificateSigningRequest
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.replace_certificate_signing_request_approval_with_http_info(name, body, **kwargs) # noqa: E501
else:
(data) = self.replace_certificate_signing_request_approval_with_http_info(name, body, **kwargs) # noqa: E501
return data
def replace_certificate_signing_request_approval_with_http_info(self, name, body, **kwargs): # noqa: E501
"""replace_certificate_signing_request_approval # noqa: E501
replace approval of the specified CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_certificate_signing_request_approval_with_http_info(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the CertificateSigningRequest (required)
:param V1beta1CertificateSigningRequest body: (required)
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or equal to 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1CertificateSigningRequest
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['name', 'body', 'dry_run', 'field_manager', 'pretty'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_certificate_signing_request_approval" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in local_var_params or
local_var_params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `replace_certificate_signing_request_approval`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in local_var_params or
local_var_params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `replace_certificate_signing_request_approval`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
if 'dry_run' in local_var_params:
query_params.append(('dryRun', local_var_params['dry_run'])) # noqa: E501
if 'field_manager' in local_var_params:
query_params.append(('fieldManager', local_var_params['field_manager'])) # noqa: E501
if 'pretty' in local_var_params:
query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/certificates.k8s.io/v1beta1/certificatesigningrequests/{name}/approval', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1CertificateSigningRequest', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def replace_certificate_signing_request_status(self, name, body, **kwargs): # noqa: E501
"""replace_certificate_signing_request_status # noqa: E501
replace status of the specified CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_certificate_signing_request_status(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the CertificateSigningRequest (required)
:param V1beta1CertificateSigningRequest body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or equal to 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.
:return: V1beta1CertificateSigningRequest
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.replace_certificate_signing_request_status_with_http_info(name, body, **kwargs) # noqa: E501
else:
(data) = self.replace_certificate_signing_request_status_with_http_info(name, body, **kwargs) # noqa: E501
return data
def replace_certificate_signing_request_status_with_http_info(self, name, body, **kwargs): # noqa: E501
"""replace_certificate_signing_request_status # noqa: E501
replace status of the specified CertificateSigningRequest # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_certificate_signing_request_status_with_http_info(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the CertificateSigningRequest (required)
:param V1beta1CertificateSigningRequest body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or equal to 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.
:return: V1beta1CertificateSigningRequest
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['name', 'body', 'pretty', 'dry_run', 'field_manager'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method replace_certificate_signing_request_status" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in local_var_params or
local_var_params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `replace_certificate_signing_request_status`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in local_var_params or
local_var_params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `replace_certificate_signing_request_status`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in local_var_params:
path_params['name'] = local_var_params['name'] # noqa: E501
query_params = []
if 'pretty' in local_var_params:
query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501
if 'dry_run' in local_var_params:
query_params.append(('dryRun', local_var_params['dry_run'])) # noqa: E501
if 'field_manager' in local_var_params:
query_params.append(('fieldManager', local_var_params['field_manager'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/certificates.k8s.io/v1beta1/certificatesigningrequests/{name}/status', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1CertificateSigningRequest', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
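# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the generated client). It shows how a
# caller might drive replace_certificate_signing_request through the official
# `kubernetes` Python package, assuming a reachable cluster that still serves
# the certificates.k8s.io/v1beta1 API (removed in newer releases); the CSR name
# "my-csr" and the usages list are placeholders.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from kubernetes import client, config

    config.load_kube_config()  # or config.load_incluster_config() inside a pod
    api = client.CertificatesV1beta1Api()
    # Read the live object first so the PUT carries a current resourceVersion.
    csr = api.read_certificate_signing_request("my-csr")
    csr.spec.usages = ["digital signature", "key encipherment", "client auth"]
    updated = api.replace_certificate_signing_request("my-csr", csr, pretty="true")
    print(updated.metadata.resource_version)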
| 62.265423
| 1,390
| 0.690338
|
d144969a300b38bc5ca8add7fd9c4a9261ea8775
| 2,418
|
py
|
Python
|
SeleniumTest/test/Module_1/Scenario2.py
|
NayakwadiS/Selenium_Python_UnitTest_HTML
|
dceb17ccfa2a7da4659a9820333330145d648772
|
[
"MIT"
] | 2
|
2022-01-06T04:58:22.000Z
|
2022-02-09T07:21:17.000Z
|
SeleniumTest/test/Module_1/Scenario2.py
|
NayakwadiS/Selenium_Python_UnitTest_HTML
|
dceb17ccfa2a7da4659a9820333330145d648772
|
[
"MIT"
] | null | null | null |
SeleniumTest/test/Module_1/Scenario2.py
|
NayakwadiS/Selenium_Python_UnitTest_HTML
|
dceb17ccfa2a7da4659a9820333330145d648772
|
[
"MIT"
] | 4
|
2020-08-20T05:33:54.000Z
|
2022-01-14T14:13:27.000Z
|
import unittest
from Main.Generic.BaseTest import *
from Main.Pages import *
from Main.Utility.logger import *
from Main.Utility import *
from ddt import ddt, data
def getData(fileName,TestCaseID):
return ReadLine(fileName,TestCaseID)
@ddt
class Scenario2(unittest.TestCase,baseTest):
logger = HTMlLogger() # function_logger(logging.INFO, logging.ERROR)
@classmethod
def setUp(cls):
baseTest().TestCaseInit(cls.logger)
@data(getData("TestData.xlsx", 'test_Scenario2'))
def test_Scenario2(self, currentRow):
self.logger.assert_testcase_log("test_Scenario2")
driver = baseTest().getDriver()
try:
pglogin = Login(driver, self.logger)
pglogin.login()
pgHome = Home(driver,self.logger)
pgHome.navigateToCasualDress()
pgCasualDress = CasualDress(driver,self.logger)
pgCasualDress.selectSizeMedium()
pgCasualDress.selectCasualDress()
pgPrintedDress = PrintedDress(driver,self.logger)
pgPrintedDress.selectQuantity(currentRow['Quntity'])
pgPrintedDress.addItemToCart()
pgPrintedDress.ContinueShopping()
pgHome.navigateToTshirts()
pgTShirt = TShirt(driver,self.logger)
pgTShirt.selectSizesmall()
pgTShirt.selectTShirt()
pgFadedTshirt = FadedTShirt(driver,self.logger)
pgFadedTshirt.selectQuantity(currentRow['Quntity'])
pgFadedTshirt.selectColor(currentRow['Color'])
pgFadedTshirt.addItemToCart()
pgFadedTshirt.proceedToCheckOut()
pgOrder = Order(driver, self.logger)
pgOrder.ProceedToCheckout()
# pgOrder.selectDeliveryAddrAsBillingAddr()
pgOrder.ProceedToCheckoutInAddress()
pgOrder.ProceedToCheckoutInShipping()
pgOrder.verifyTermsOfServiceError()
pgOrder.acceptTermsOfService()
pgOrder.ProceedToCheckoutInShipping()
pgOrder.selectPaymentMode(currentRow['Payment Mode'])
pgOrder.confirmOrder()
except Exception as e:
self.logger.assert_step_fail_log(driver, str(e))
@classmethod
def tearDown(cls):
baseTest().TestCaseExit(cls.logger)
if __name__ == "__main__":
unittest.main()
| 32.675676
| 74
| 0.63689
|
6c38fccbddfa264be8252947f28b5c8ef44bca72
| 5,141
|
py
|
Python
|
lib/python/test.py
|
MeitalRann/Feature-and-Arcitecture-Study-for-Speech-Activity-Detection
|
1f37d2090c41081ccedc0dd1fa0d1721b026b396
|
[
"MIT"
] | 1
|
2021-01-13T00:09:02.000Z
|
2021-01-13T00:09:02.000Z
|
lib/python/test.py
|
MeitalRann/Feature-and-Arcitecture-Study-for-Speech-Activity-Detection
|
1f37d2090c41081ccedc0dd1fa0d1721b026b396
|
[
"MIT"
] | null | null | null |
lib/python/test.py
|
MeitalRann/Feature-and-Arcitecture-Study-for-Speech-Activity-Detection
|
1f37d2090c41081ccedc0dd1fa0d1721b026b396
|
[
"MIT"
] | null | null | null |
import numpy as np
import sys
import os, getopt
import scipy.io as sio
from sklearn.metrics import accuracy_score
sys.path.insert(0, r'.\lib\python')
import eer_test as err_test
import feat_setting as fs
import test_utils as utils
if __name__ == '__main__':
try:
opts, args = getopt.getopt(sys.argv[1:], 'hm:e:f:', ["prj_dir="])
except getopt.GetoptError as err:
print(str(err))
sys.exit(1)
if len(opts) != 4:
print("arguments are not enough.")
sys.exit(1)
for opt, arg in opts:
if opt == '-h':
sys.exit(0)
elif opt == '-m':
mode = int(arg)
elif opt == '-e':
extract_feat = int(arg)
elif opt == '--prj_dir':
prj_dir = str(arg)
elif opt == '-f':
feat = int(arg)
set_feat = fs.featSetting(feat)
f_name = set_feat.name
f_dim = set_feat.dimension
output_type = 0
is_default = 0
th = 0.5
if mode == 0:
mode_name = 'ACAM'
elif mode == 1:
mode_name = 'bDNN'
elif mode == 2:
mode_name = 'DNN'
elif mode == 3:
mode_name = 'LSTM'
try:
os.remove(prj_dir + r'\\result\\' + mode_name + r'\\' + f_name + r'\AUC.txt')
except OSError:
print("Error while deleting file ", prj_dir + r'\\result\\' + mode_name + r'\\' + f_name + r'\AUC.txt')
try:
print('Create ', prj_dir + r'\\result\\' + mode_name + r'\\' + f_name)
os.makedirs(prj_dir + r'\\result\\' + mode_name + r'\\' + f_name)
except OSError:
print(prj_dir + r'\\result\\' + mode_name + r'\\' + f_name, 'Exists')
train_dir = prj_dir + r"\data\raw\test"
dir_list = [f.path for f in os.scandir(train_dir) if f.is_dir()]
for n in range(len(dir_list)):
folder = dir_list[n]
subfolders = [f.name for f in os.scandir(folder) if f.is_dir()]
for dir in subfolders:
dir_name = dir
test_dir = folder + r'\\' + dir
tot_auc = utils.vad_func(prj_dir, test_dir, mode, th, extract_feat, is_default, feat, f_name, dir_name,
off_on_length=30,
on_off_length=30, hang_before=0, hang_over=0)
print(tot_auc)
with open(prj_dir + r'\\result\\' + mode_name + r'\\' + f_name + r'\AUC.txt', 'a') as the_file:
the_file.write(dir_name + ': ' + str(tot_auc) + ' \n')
if len(subfolders) == 0:
test_dir = folder
dir_name = folder.split('\\')[-1]
tot_auc = utils.vad_func(prj_dir, test_dir, mode, th, extract_feat, is_default, feat, f_name, dir_name,
off_on_length=30,
on_off_length=30, hang_before=0, hang_over=0)
print(tot_auc)
with open(prj_dir + r'\\result\\' + mode_name + r'\\' + f_name + r'\AUC.txt', 'a') as the_file:
the_file.write(dir_name + ': ' + str(tot_auc) + ' \n')
# # noise independent testset:
# folder = r'C:\meital\University\Madison\Thesis\VAD-py\data\raw\test\noisy'
# subfolders = [f.name for f in os.scandir(folder) if f.is_dir()]
# for dir in subfolders:
# dir_name = dir
# test_dir = folder + r'\\' + dir
# tot_auc = utils.vad_func(prj_dir, test_dir, mode, th, extract_feat, is_default, feat, f_name, dir_name, off_on_length=30,
# on_off_length=30, hang_before=0, hang_over=0)
# print(tot_auc)
# with open(prj_dir + r'\\result\\' + mode_name + r'\\' + f_name + r'\AUC.txt', 'a') as the_file:
# the_file.write(dir_name + ': ' + str(tot_auc) + ' \n')
#
#
# # recorded testset:
# folder = r'C:\meital\University\Madison\Thesis\VAD-py\data\raw\test\recorded_data'
# subfolders = [f.name for f in os.scandir(folder) if f.is_dir()]
# for dir in subfolders:
# dir_name = dir
# test_dir = folder + r'\\' + dir
# tot_auc = utils.vad_func(prj_dir, test_dir, mode, th, extract_feat, is_default, feat, f_name, dir_name, off_on_length=30,
# on_off_length=30, hang_before=0, hang_over=0)
# print(tot_auc)
# with open(prj_dir + r'\\result\\' + mode_name + r'\\' + f_name + r'\AUC.txt', 'a') as the_file:
# the_file.write(dir_name + ': ' + str(tot_auc) + ' \n')
#
# # noise dependent testset:
# dir_name = 'matched'
# test_dir = r'C:\meital\University\Madison\Thesis\VAD-py\data\raw\test\matched'
# tot_auc = utils.vad_func(prj_dir, test_dir, mode, th, extract_feat, is_default, feat, f_name, dir_name, off_on_length=30,
# on_off_length=30, hang_before=0, hang_over=0)
# print(tot_auc)
# with open(prj_dir + r'\\result\\' + mode_name + r'\\' + f_name + r'\AUC.txt', 'a') as the_file:
# the_file.write(dir_name + ': ' + str(tot_auc) + ' \n')
| 40.480315
| 132
| 0.538806
|
9f8b2598b1c1b7c86c3acc04acbefee30645b250
| 6,468
|
py
|
Python
|
hubspot/crm/deals/api/search_api.py
|
cclauss/hubspot-api-python
|
7c60c0f572b98c73e1f1816bf5981396a42735f6
|
[
"Apache-2.0"
] | null | null | null |
hubspot/crm/deals/api/search_api.py
|
cclauss/hubspot-api-python
|
7c60c0f572b98c73e1f1816bf5981396a42735f6
|
[
"Apache-2.0"
] | null | null | null |
hubspot/crm/deals/api/search_api.py
|
cclauss/hubspot-api-python
|
7c60c0f572b98c73e1f1816bf5981396a42735f6
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Deals
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v3
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from hubspot.crm.deals.api_client import ApiClient
from hubspot.crm.deals.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class SearchApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def do_search(self, public_object_search_request, **kwargs): # noqa: E501
"""Filter, Sort, and Search CRM Objects # noqa: E501
Filter, Sort, and Search CRM Objects # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.do_search(public_object_search_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param PublicObjectSearchRequest public_object_search_request: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: CollectionResponseWithTotalSimplePublicObjectForwardPaging
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.do_search_with_http_info(public_object_search_request, **kwargs) # noqa: E501
def do_search_with_http_info(self, public_object_search_request, **kwargs): # noqa: E501
"""Filter, Sort, and Search CRM Objects # noqa: E501
Filter, Sort, and Search CRM Objects # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.do_search_with_http_info(public_object_search_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param PublicObjectSearchRequest public_object_search_request: (required)
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(CollectionResponseWithTotalSimplePublicObjectForwardPaging, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'public_object_search_request'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method do_search" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'public_object_search_request' is set
if self.api_client.client_side_validation and ('public_object_search_request' not in local_var_params or # noqa: E501
local_var_params['public_object_search_request'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `public_object_search_request` when calling `do_search`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'public_object_search_request' in local_var_params:
body_params = local_var_params['public_object_search_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', '*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['hapikey', 'oauth2'] # noqa: E501
return self.api_client.call_api(
'/crm/v3/objects/deals/search', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CollectionResponseWithTotalSimplePublicObjectForwardPaging', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
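# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the generated module). It shows one
# way to call do_search through the top-level HubSpot client, assuming the
# `hubspot-api-python` package and a valid access token (older releases used
# api_key instead); the token value and the "dealname"/"renewal" filter are
# placeholders. Raw dicts are used for the filter groups here; the typed
# Filter/FilterGroup models can be used instead.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from hubspot import HubSpot
    from hubspot.crm.deals import PublicObjectSearchRequest

    api_client = HubSpot(access_token="YOUR-PRIVATE-APP-TOKEN")
    search_request = PublicObjectSearchRequest(
        filter_groups=[{"filters": [{"propertyName": "dealname",
                                     "operator": "CONTAINS_TOKEN",
                                     "value": "renewal"}]}],
        limit=10,
    )
    page = api_client.crm.deals.search_api.do_search(
        public_object_search_request=search_request
    )
    print(page.total)
    for deal in page.results:
        print(deal.id, deal.properties.get("dealname"))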
| 41.461538
| 135
| 0.624613
|
b79b97e0158af05983196f3c1719a74886a6e85e
| 7,307
|
py
|
Python
|
src/collectors/jolokia/test/testjolokia.py
|
Coolknight/Diamond
|
651bda27e1129e1cf892ae15bdd7b959778b9990
|
[
"MIT"
] | 2
|
2016-10-24T02:51:32.000Z
|
2021-01-09T20:49:44.000Z
|
src/collectors/jolokia/test/testjolokia.py
|
Coolknight/Diamond
|
651bda27e1129e1cf892ae15bdd7b959778b9990
|
[
"MIT"
] | 1
|
2016-04-06T00:00:35.000Z
|
2016-04-06T00:00:35.000Z
|
src/collectors/jolokia/test/testjolokia.py
|
Coolknight/Diamond
|
651bda27e1129e1cf892ae15bdd7b959778b9990
|
[
"MIT"
] | 2
|
2016-05-12T08:01:52.000Z
|
2016-05-16T20:39:47.000Z
|
#!/usr/bin/python
# coding=utf-8
##########################################################################
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from jolokia import JolokiaCollector
##########################################################################
class TestJolokiaCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('JolokiaCollector', {})
self.collector = JolokiaCollector(config, None)
def test_import(self):
self.assertTrue(JolokiaCollector)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
def se(url):
if url == 'http://localhost:8778/jolokia/list':
return self.getFixture('listing')
else:
return self.getFixture('stats')
patch_urlopen = patch('urllib2.urlopen', Mock(side_effect=se))
patch_urlopen.start()
self.collector.collect()
patch_urlopen.stop()
metrics = self.get_metrics()
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_real_data_with_rewrite(self, publish_mock):
def se(url):
if url == 'http://localhost:8778/jolokia/list':
return self.getFixture('listing')
else:
return self.getFixture('stats')
patch_urlopen = patch('urllib2.urlopen', Mock(side_effect=se))
patch_urlopen.start()
self.collector.rewrite = {'memoryUsage': 'memUsed', r'.*\.init': ''}
self.collector.collect()
patch_urlopen.stop()
rewritemetrics = self.get_metrics_rewrite_test()
self.assertPublishedMany(publish_mock, rewritemetrics)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_and_basic_auth(self, publish_mock):
self.collector.config["username"] = "user"
self.collector.config["password"] = "password"
self.test_should_work_with_real_data()
@patch.object(Collector, 'publish')
def test_should_fail_gracefully(self, publish_mock):
patch_urlopen = patch('urllib2.urlopen', Mock(
return_value=self.getFixture('stats_blank')))
patch_urlopen.start()
self.collector.collect()
patch_urlopen.stop()
self.assertPublishedMany(publish_mock, {})
@patch.object(Collector, 'publish')
def test_should_skip_when_mbean_request_fails(self, publish_mock):
def se(url):
if url == 'http://localhost:8778/jolokia/list':
return self.getFixture('listing_with_bad_mbean')
elif url == ('http://localhost:8778/jolokia/?ignoreErrors=true'
'&p=read/xxx.bad.package:*'):
return self.getFixture('stats_error')
else:
return self.getFixture('stats')
patch_urlopen = patch('urllib2.urlopen', Mock(side_effect=se))
patch_urlopen.start()
self.collector.collect()
patch_urlopen.stop()
metrics = self.get_metrics()
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
def test_should_escape_jolokia_domains(self):
domain_with_slash = self.collector.escape_domain('some/domain')
domain_with_bang = self.collector.escape_domain('some!domain')
domain_with_quote = self.collector.escape_domain('some"domain')
self.assertEqual(domain_with_slash, 'some%21/domain')
self.assertEqual(domain_with_bang, 'some%21%21domain')
self.assertEqual(domain_with_quote, 'some%21%22domain')
def get_metrics(self):
prefix = 'java.lang.name_ParNew.type_GarbageCollector.LastGcInfo'
return {
prefix + '.startTime': 14259063,
prefix + '.id': 219,
prefix + '.duration': 2,
prefix + '.memoryUsageBeforeGc.Par_Eden_Space.max': 25165824,
prefix + '.memoryUsageBeforeGc.Par_Eden_Space.committed': 25165824,
prefix + '.memoryUsageBeforeGc.Par_Eden_Space.init': 25165824,
prefix + '.memoryUsageBeforeGc.Par_Eden_Space.used': 25165824,
prefix + '.memoryUsageBeforeGc.CMS_Old_Gen.max': 73400320,
prefix + '.memoryUsageBeforeGc.CMS_Old_Gen.committed': 73400320,
prefix + '.memoryUsageBeforeGc.CMS_Old_Gen.init': 73400320,
prefix + '.memoryUsageBeforeGc.CMS_Old_Gen.used': 5146840,
prefix + '.memoryUsageBeforeGc.CMS_Perm_Gen.max': 85983232,
prefix + '.memoryUsageBeforeGc.CMS_Perm_Gen.committed': 23920640,
prefix + '.memoryUsageBeforeGc.CMS_Perm_Gen.init': 21757952,
prefix + '.memoryUsageBeforeGc.CMS_Perm_Gen.used': 23796992,
prefix + '.memoryUsageBeforeGc.Code_Cache.max': 50331648,
prefix + '.memoryUsageBeforeGc.Code_Cache.committed': 2686976,
prefix + '.memoryUsageBeforeGc.Code_Cache.init': 2555904,
prefix + '.memoryUsageBeforeGc.Code_Cache.used': 2600768,
prefix + '.memoryUsageBeforeGc.Par_Survivor_Space.max': 3145728,
prefix + '.memoryUsageBeforeGc.Par_Survivor_Space.committed':
3145728,
prefix + '.memoryUsageBeforeGc.Par_Survivor_Space.init': 3145728,
prefix + '.memoryUsageBeforeGc.Par_Survivor_Space.used': 414088
}
def get_metrics_rewrite_test(self):
prefix = 'java.lang.name_ParNew.type_GarbageCollector.LastGcInfo'
return {
prefix + '.startTime': 14259063,
prefix + '.id': 219,
prefix + '.duration': 2,
prefix + '.memUsedBeforeGc.Par_Eden_Space.max': 25165824,
prefix + '.memUsedBeforeGc.Par_Eden_Space.committed': 25165824,
prefix + '.memUsedBeforeGc.Par_Eden_Space.used': 25165824,
prefix + '.memUsedBeforeGc.CMS_Old_Gen.max': 73400320,
prefix + '.memUsedBeforeGc.CMS_Old_Gen.committed': 73400320,
prefix + '.memUsedBeforeGc.CMS_Old_Gen.used': 5146840,
prefix + '.memUsedBeforeGc.CMS_Perm_Gen.max': 85983232,
prefix + '.memUsedBeforeGc.CMS_Perm_Gen.committed': 23920640,
prefix + '.memUsedBeforeGc.CMS_Perm_Gen.used': 23796992,
prefix + '.memUsedBeforeGc.Code_Cache.max': 50331648,
prefix + '.memUsedBeforeGc.Code_Cache.committed': 2686976,
prefix + '.memUsedBeforeGc.Code_Cache.used': 2600768,
prefix + '.memUsedBeforeGc.Par_Survivor_Space.max': 3145728,
prefix + '.memUsedBeforeGc.Par_Survivor_Space.committed': 3145728,
prefix + '.memUsedBeforeGc.Par_Survivor_Space.used': 414088
}
##########################################################################
if __name__ == "__main__":
unittest.main()
| 44.018072
| 79
| 0.628849
|
6a0ad479cb5e02adc948370bbd47aae6a72a57e2
| 63
|
py
|
Python
|
litex/build/quicklogic/__init__.py
|
osterwood/litex
|
db20cb172dc982c5879aa8080ec7aa18de181cc5
|
[
"ADSL"
] | 1,501
|
2016-04-19T18:16:21.000Z
|
2022-03-31T17:46:31.000Z
|
litex/build/quicklogic/__init__.py
|
osterwood/litex
|
db20cb172dc982c5879aa8080ec7aa18de181cc5
|
[
"ADSL"
] | 1,135
|
2016-04-19T05:49:14.000Z
|
2022-03-31T15:21:19.000Z
|
litex/build/quicklogic/__init__.py
|
osterwood/litex
|
db20cb172dc982c5879aa8080ec7aa18de181cc5
|
[
"ADSL"
] | 357
|
2016-04-19T05:00:24.000Z
|
2022-03-31T11:28:32.000Z
|
from litex.build.quicklogic.platform import QuickLogicPlatform
| 31.5
| 62
| 0.888889
|
06300c7747e756c6113dda6065f529a2199392ae
| 3,781
|
py
|
Python
|
contrib/macdeploy/custom_dsstore.py
|
PhineasNa/HawkCoin
|
757ef51639e2d6142aa110d74decd5d779f46950
|
[
"MIT"
] | 1
|
2021-12-13T17:10:06.000Z
|
2021-12-13T17:10:06.000Z
|
contrib/macdeploy/custom_dsstore.py
|
PhineasNa/HawkCoin
|
757ef51639e2d6142aa110d74decd5d779f46950
|
[
"MIT"
] | null | null | null |
contrib/macdeploy/custom_dsstore.py
|
PhineasNa/HawkCoin
|
757ef51639e2d6142aa110d74decd5d779f46950
|
[
"MIT"
] | 1
|
2021-12-15T15:10:19.000Z
|
2021-12-15T15:10:19.000Z
|
#!/usr/bin/env python
# Copyright (c) 2013-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from __future__ import division,print_function,unicode_literals
import biplist
from ds_store import DSStore
from mac_alias import Alias
import sys
output_file = sys.argv[1]
package_name_ns = sys.argv[2]
ds = DSStore.open(output_file, 'w+')
ds['.']['bwsp'] = {
'ShowStatusBar': False,
'WindowBounds': b'{{300, 280}, {500, 343}}',
'ContainerShowSidebar': False,
'SidebarWidth': 0,
'ShowTabView': False,
'PreviewPaneVisibility': False,
'ShowToolbar': False,
'ShowSidebar': False,
'ShowPathbar': True
}
icvp = {
'gridOffsetX': 0.0,
'textSize': 12.0,
'viewOptionsVersion': 1,
'backgroundImageAlias': b'\x00\x00\x00\x00\x02\x1e\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd1\x94\\\xb0H+\x00\x05\x00\x00\x00\x98\x0fbackground.tiff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x99\xd19\xb0\xf8\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\r\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0b.background\x00\x00\x10\x00\x08\x00\x00\xd1\x94\\\xb0\x00\x00\x00\x11\x00\x08\x00\x00\xd19\xb0\xf8\x00\x00\x00\x01\x00\x04\x00\x00\x00\x98\x00\x0e\x00 \x00\x0f\x00b\x00a\x00c\x00k\x00g\x00r\x00o\x00u\x00n\x00d\x00.\x00t\x00i\x00f\x00f\x00\x0f\x00\x02\x00\x00\x00\x12\x00\x1c/.background/background.tiff\x00\x14\x01\x06\x00\x00\x00\x00\x01\x06\x00\x02\x00\x00\x0cMacintosh HD\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xce\x97\xab\xc3H+\x00\x00\x01\x88[\x88\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02u\xab\x8d\xd1\x94\\\xb0devrddsk\xff\xff\xff\xff\x00\x00\t \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07bitcoin\x00\x00\x10\x00\x08\x00\x00\xce\x97\xab\xc3\x00\x00\x00\x11\x00\x08\x00\x00\xd1\x94\\\xb0\x00\x00\x00\x01\x00\x14\x01\x88[\x88\x00\x16\xa9\t\x00\x08\xfaR\x00\x08\xfaQ\x00\x02d\x8e\x00\x0e\x00\x02\x00\x00\x00\x0f\x00\x1a\x00\x0c\x00M\x00a\x00c\x00i\x00n\x00t\x00o\x00s\x00h\x00 \x00H\x00D\x00\x13\x00\x01/\x00\x00\x15\x00\x02\x00\x14\xff\xff\x00\x00\xff\xff\x00\x00',
'backgroundColorBlue': 1.0,
'iconSize': 96.0,
'backgroundColorGreen': 1.0,
'arrangeBy': 'none',
'showIconPreview': True,
'gridSpacing': 100.0,
'gridOffsetY': 0.0,
'showItemInfo': False,
'labelOnBottom': True,
'backgroundType': 2,
'backgroundColorRed': 1.0
}
alias = Alias.from_bytes(icvp['backgroundImageAlias'])
alias.volume.name = package_name_ns
alias.volume.posix_path = '/Volumes/' + package_name_ns
alias.volume.disk_image_alias.target.filename = package_name_ns + '.temp.dmg'
alias.volume.disk_image_alias.target.carbon_path = 'Macintosh HD:Users:\x00litecoinuser:\x00Documents:\x00litecoin:\x00litecoin:\x00' + package_name_ns + '.temp.dmg'
alias.volume.disk_image_alias.target.posix_path = 'Users/litecoinuser/Documents/litecoin/litecoin/' + package_name_ns + '.temp.dmg'
alias.target.carbon_path = package_name_ns + ':.background:\x00background.tiff'
icvp['backgroundImageAlias'] = biplist.Data(alias.to_bytes())
ds['.']['icvp'] = icvp
ds['.']['vSrn'] = ('long', 1)
ds['Applications']['Iloc'] = (370, 156)
ds['Hawkcoin-Qt.app']['Iloc'] = (128, 156)
ds.flush()
ds.close()
| 61.983607
| 1,817
| 0.72785
|
34073dcce3cd88997c096839bc122999a1865fe8
| 516
|
py
|
Python
|
tests/r/test_respdis.py
|
hajime9652/observations
|
2c8b1ac31025938cb17762e540f2f592e302d5de
|
[
"Apache-2.0"
] | 199
|
2017-07-24T01:34:27.000Z
|
2022-01-29T00:50:55.000Z
|
tests/r/test_respdis.py
|
hajime9652/observations
|
2c8b1ac31025938cb17762e540f2f592e302d5de
|
[
"Apache-2.0"
] | 46
|
2017-09-05T19:27:20.000Z
|
2019-01-07T09:47:26.000Z
|
tests/r/test_respdis.py
|
hajime9652/observations
|
2c8b1ac31025938cb17762e540f2f592e302d5de
|
[
"Apache-2.0"
] | 45
|
2017-07-26T00:10:44.000Z
|
2022-03-16T20:44:59.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
import sys
import tempfile
from observations.r.respdis import respdis
def test_respdis():
"""Test module respdis.py by downloading
respdis.csv and testing shape of
extracted data has 111 rows and 5 columns
"""
test_path = tempfile.mkdtemp()
x_train, metadata = respdis(test_path)
try:
assert x_train.shape == (111, 5)
except:
shutil.rmtree(test_path)
raise
| 21.5
| 44
| 0.753876
|
dc15daa1d2c52f972c4109cd88079b34980af9e3
| 719
|
py
|
Python
|
tests/test_action_parse_xml.py
|
cognifloyd/stackstorm-xml
|
aa842e4ebbe7320af959d8d539d6980633053c6f
|
[
"Apache-2.0"
] | 164
|
2015-01-17T16:08:33.000Z
|
2021-08-03T02:34:07.000Z
|
tests/test_action_parse_xml.py
|
cognifloyd/stackstorm-xml
|
aa842e4ebbe7320af959d8d539d6980633053c6f
|
[
"Apache-2.0"
] | 442
|
2015-01-01T11:19:01.000Z
|
2017-09-06T23:26:17.000Z
|
tests/test_action_parse_xml.py
|
cognifloyd/stackstorm-xml
|
aa842e4ebbe7320af959d8d539d6980633053c6f
|
[
"Apache-2.0"
] | 202
|
2015-01-13T00:37:40.000Z
|
2020-11-07T11:30:10.000Z
|
from st2tests.base import BaseActionTestCase
from parse_xml import ParseXMLAction
__all__ = [
'ParseXMLActionTestCase'
]
MOCK_DATA_1 = """
<note>
<to>Tove</to>
<from>Jani</from>
<heading>Reminder</heading>
<body>Don't forget me this weekend!</body>
</note>
""".strip()
class ParseXMLActionTestCase(BaseActionTestCase):
action_cls = ParseXMLAction
def test_run(self):
result = self.get_action_instance().run(data=MOCK_DATA_1)
expected = {
'note': {
'to': 'Tove',
'from': 'Jani',
'heading': 'Reminder',
'body': 'Don\'t forget me this weekend!'
}
}
self.assertEqual(result, expected)
| 21.787879
| 65
| 0.59388
|
3a557db5bf860fea0fe7c657726195c3418930ad
| 9,221
|
py
|
Python
|
sdk/python/pulumi_azure_native/dataprotection/backup_policy.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/dataprotection/backup_policy.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/dataprotection/backup_policy.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['BackupPolicyArgs', 'BackupPolicy']
@pulumi.input_type
class BackupPolicyArgs:
def __init__(__self__, *,
resource_group_name: pulumi.Input[str],
vault_name: pulumi.Input[str],
backup_policy_name: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input['BackupPolicyArgs']] = None):
"""
The set of arguments for constructing a BackupPolicy resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group where the backup vault is present.
:param pulumi.Input[str] vault_name: The name of the backup vault.
:param pulumi.Input['BackupPolicyArgs'] properties: BaseBackupPolicyResource properties
"""
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "vault_name", vault_name)
if backup_policy_name is not None:
pulumi.set(__self__, "backup_policy_name", backup_policy_name)
if properties is not None:
pulumi.set(__self__, "properties", properties)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group where the backup vault is present.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="vaultName")
def vault_name(self) -> pulumi.Input[str]:
"""
The name of the backup vault.
"""
return pulumi.get(self, "vault_name")
@vault_name.setter
def vault_name(self, value: pulumi.Input[str]):
pulumi.set(self, "vault_name", value)
@property
@pulumi.getter(name="backupPolicyName")
def backup_policy_name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "backup_policy_name")
@backup_policy_name.setter
def backup_policy_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "backup_policy_name", value)
@property
@pulumi.getter
def properties(self) -> Optional[pulumi.Input['BackupPolicyArgs']]:
"""
BaseBackupPolicyResource properties
"""
return pulumi.get(self, "properties")
@properties.setter
def properties(self, value: Optional[pulumi.Input['BackupPolicyArgs']]):
pulumi.set(self, "properties", value)
class BackupPolicy(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
backup_policy_name: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input[pulumi.InputType['BackupPolicyArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
vault_name: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
BaseBackupPolicy resource
API Version: 2021-01-01.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['BackupPolicyArgs']] properties: BaseBackupPolicyResource properties
:param pulumi.Input[str] resource_group_name: The name of the resource group where the backup vault is present.
:param pulumi.Input[str] vault_name: The name of the backup vault.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: BackupPolicyArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
BaseBackupPolicy resource
API Version: 2021-01-01.
:param str resource_name: The name of the resource.
:param BackupPolicyArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(BackupPolicyArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
backup_policy_name: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input[pulumi.InputType['BackupPolicyArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
vault_name: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = BackupPolicyArgs.__new__(BackupPolicyArgs)
__props__.__dict__["backup_policy_name"] = backup_policy_name
__props__.__dict__["properties"] = properties
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
if vault_name is None and not opts.urn:
raise TypeError("Missing required property 'vault_name'")
__props__.__dict__["vault_name"] = vault_name
__props__.__dict__["name"] = None
__props__.__dict__["system_data"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:dataprotection:BackupPolicy"), pulumi.Alias(type_="azure-native:dataprotection/v20210101:BackupPolicy"), pulumi.Alias(type_="azure-nextgen:dataprotection/v20210101:BackupPolicy"), pulumi.Alias(type_="azure-native:dataprotection/v20210201preview:BackupPolicy"), pulumi.Alias(type_="azure-nextgen:dataprotection/v20210201preview:BackupPolicy")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(BackupPolicy, __self__).__init__(
'azure-native:dataprotection:BackupPolicy',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'BackupPolicy':
"""
Get an existing BackupPolicy resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = BackupPolicyArgs.__new__(BackupPolicyArgs)
__props__.__dict__["name"] = None
__props__.__dict__["properties"] = None
__props__.__dict__["system_data"] = None
__props__.__dict__["type"] = None
return BackupPolicy(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name associated with the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> pulumi.Output['outputs.BackupPolicyResponse']:
"""
BaseBackupPolicyResource properties
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:
"""
Metadata pertaining to creation and last modification of the resource.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type represents the complete path of the form Namespace/ResourceType/ResourceType/...
"""
return pulumi.get(self, "type")
| 42.689815
| 430
| 0.656436
|
7a1ada8569c8b4fc7c73ac04c1cb9c71ece0e9bc
| 2,136
|
py
|
Python
|
misago/users/tests/test_mention_api.py
|
HenryChenV/iJiangNan
|
68f156d264014939f0302222e16e3125119dd3e3
|
[
"MIT"
] | 1
|
2017-07-25T03:04:36.000Z
|
2017-07-25T03:04:36.000Z
|
misago/users/tests/test_mention_api.py
|
HenryChenV/iJiangNan
|
68f156d264014939f0302222e16e3125119dd3e3
|
[
"MIT"
] | null | null | null |
misago/users/tests/test_mention_api.py
|
HenryChenV/iJiangNan
|
68f156d264014939f0302222e16e3125119dd3e3
|
[
"MIT"
] | null | null | null |
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.urls import reverse
from misago.conf import settings
UserModel = get_user_model()
class AuthenticateAPITests(TestCase):
def setUp(self):
self.api_link = reverse('misago:api:mention-suggestions')
def test_no_query(self):
"""api returns empty result set if no query is given"""
response = self.client.get(self.api_link)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), [])
def test_no_results(self):
"""api returns empty result set if no query is given"""
response = self.client.get(self.api_link + '?q=none')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), [])
def test_user_search(self):
"""api searches uses"""
UserModel.objects.create_user('BobBoberson', 'bob@test.com', 'pass123')
# exact case sensitive match
response = self.client.get(self.api_link + '?q=BobBoberson')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), [
{
'avatar': '/placekitten.com/400/400',
'username': 'BobBoberson',
}
])
# rough case-insensitive match
response = self.client.get(self.api_link + '?q=bob')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), [
{
'avatar': '/placekitten.com/400/400',
'username': 'BobBoberson',
}
])
# eager case insensitive match
response = self.client.get(self.api_link + '?q=b')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), [
{
'avatar': '/placekitten.com/400/400',
'username': 'BobBoberson',
}
])
# invalid match
response = self.client.get(self.api_link + '?q=bu')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), [])
| 31.880597
| 79
| 0.600187
|
9d6992533b6807413eeb22d8eb29bd896bd60047
| 3,225
|
py
|
Python
|
examples/new_project_templates/single_gpu_node_dp_template.py
|
SallyOne/pytorch-lightning
|
4cfcec0616d4cefbeb7228a904a1f14e0f6a72a4
|
[
"Apache-2.0"
] | 1
|
2019-08-06T08:23:54.000Z
|
2019-08-06T08:23:54.000Z
|
examples/new_project_templates/single_gpu_node_dp_template.py
|
GreatWizard9519/pytorch-lightning
|
4cfcec0616d4cefbeb7228a904a1f14e0f6a72a4
|
[
"Apache-2.0"
] | null | null | null |
examples/new_project_templates/single_gpu_node_dp_template.py
|
GreatWizard9519/pytorch-lightning
|
4cfcec0616d4cefbeb7228a904a1f14e0f6a72a4
|
[
"Apache-2.0"
] | null | null | null |
"""
Runs a model on a single node across N-gpus using dataParallel
"""
import os
import numpy as np
import torch
from test_tube import HyperOptArgumentParser, Experiment
from pytorch_lightning.models.trainer import Trainer
from pytorch_lightning.callbacks import EarlyStopping, ModelCheckpoint
SEED = 2334
torch.manual_seed(SEED)
np.random.seed(SEED)
from .lightning_module_template import LightningTemplateModel
def main(hparams):
"""
Main training routine specific for this project
:param hparams:
:return:
"""
# ------------------------
# 1 INIT LIGHTNING MODEL
# ------------------------
print('loading model...')
model = LightningTemplateModel(hparams)
print('model built')
# ------------------------
# 2 INIT TEST TUBE EXP
# ------------------------
# init experiment
exp = Experiment(
name=hparams.experiment_name,
save_dir=hparams.test_tube_save_path,
autosave=False,
description='test demo'
)
exp.argparse(hparams)
exp.save()
# ------------------------
# 3 DEFINE CALLBACKS
# ------------------------
model_save_path = '{}/{}/{}'.format(hparams.model_save_path, exp.name, exp.version)
early_stop = EarlyStopping(
monitor='val_acc',
patience=3,
verbose=True,
mode='max'
)
checkpoint = ModelCheckpoint(
filepath=model_save_path,
save_best_only=True,
verbose=True,
monitor='val_loss',
mode='min'
)
# ------------------------
# 4 INIT TRAINER
# ------------------------
trainer = Trainer(
experiment=exp,
checkpoint_callback=checkpoint,
early_stop_callback=early_stop,
gpus=hparams.gpus,
)
# ------------------------
# 5 START TRAINING
# ------------------------
trainer.fit(model)
if __name__ == '__main__':
# dirs
root_dir = os.path.dirname(os.path.realpath(__file__))
demo_log_dir = os.path.join(root_dir, 'pt_lightning_demo_logs')
checkpoint_dir = os.path.join(demo_log_dir, 'model_weights')
test_tube_dir = os.path.join(demo_log_dir, 'test_tube_data')
# although we use HyperOptArgumentParser, we are using it only as argparse right now
parent_parser = HyperOptArgumentParser(strategy='grid_search', add_help=False)
# gpu args
parent_parser.add_argument('--gpus', type=str, default='-1', help='how many gpus to use in the node. -1 uses all the gpus on the node')
parent_parser.add_argument('--test_tube_save_path', type=str, default=test_tube_dir, help='where to save logs')
parent_parser.add_argument('--model_save_path', type=str, default=checkpoint_dir, help='where to save model')
parent_parser.add_argument('--experiment_name', type=str, default='pt_lightning_exp_a', help='test tube exp name')
# allow model to overwrite or extend args
parser = LightningTemplateModel.add_model_specific_args(parent_parser, root_dir)
hyperparams = parser.parse_args()
# ---------------------
# RUN TRAINING
# ---------------------
# run in interactive mode on the requested GPUs
print(f'RUNNING INTERACTIVE MODE ON GPUS. gpu ids: {hyperparams.gpus}')
main(hyperparams)
| 29.587156
| 139
| 0.619845
|
bb2422c6c8a145617092c1461737f95164def3e0
| 15,237
|
py
|
Python
|
static/scripts/change_hostname/change_gluu_host.py
|
zeroluck/community-edition-setup
|
36b8c6c31ff3019717a520db03d909ef1e59cc98
|
[
"MIT"
] | null | null | null |
static/scripts/change_hostname/change_gluu_host.py
|
zeroluck/community-edition-setup
|
36b8c6c31ff3019717a520db03d909ef1e59cc98
|
[
"MIT"
] | null | null | null |
static/scripts/change_hostname/change_gluu_host.py
|
zeroluck/community-edition-setup
|
36b8c6c31ff3019717a520db03d909ef1e59cc98
|
[
"MIT"
] | null | null | null |
import sys
import os
import json
import argparse
from ldap3 import Server, Connection, SUBTREE, BASE, LEVEL, \
MODIFY_REPLACE, MODIFY_ADD, MODIFY_DELETE
def modify_etc_hosts(host_ip, old_hosts, old_host):
hosts = {
'ipv4':{},
'ipv6':{},
}
for l in old_hosts:
ls=l.strip()
if ls:
if not ls[0]=='#':
if ls[0]==':':
h_type='ipv6'
else:
h_type='ipv4'
lss = ls.split()
ip_addr = lss[0]
if not ip_addr in hosts[h_type]:
hosts[h_type][ip_addr]=[]
for h in lss[1:]:
if (not h in hosts[h_type][ip_addr]) and (h!=old_host):
hosts[h_type][ip_addr].append(h)
for h,i in host_ip:
if h in hosts['ipv4']['127.0.0.1']:
hosts['ipv4']['127.0.0.1'].remove(h)
for h,i in host_ip:
if h in hosts['ipv6']['::1']:
hosts['ipv6']['::1'].remove(h)
for h,i in host_ip:
if i in hosts['ipv4']:
if not h in hosts['ipv4'][i]:
hosts['ipv4'][i].append(h)
else:
hosts['ipv4'][i] = [h]
hostse = ''
for iptype in hosts:
for ipaddr in hosts[iptype]:
host_list = [ipaddr] + hosts[iptype][ipaddr]
hl = "\t".join(host_list)
hostse += hl +'\n'
return hostse
class Installer:
def __init__(self, c, gluu_version, server_os):
self.c = c
self.gluu_version = gluu_version
self.server_os = server_os
if not hasattr(self.c, 'fake_remote'):
self.container = '/opt/gluu-server-{}'.format(gluu_version)
if ('Ubuntu' in self.server_os) or ('Debian' in self.server_os):
self.run_command = 'chroot {} /bin/bash -c "{}"'.format(self.container,'{}')
self.install_command = 'chroot {} /bin/bash -c "apt-get install -y {}"'.format(self.container,'{}')
elif 'CentOS' in self.server_os:
self.run_command = ('ssh -o IdentityFile=/etc/gluu/keys/gluu-console '
'-o Port=60022 -o LogLevel=QUIET -o '
'StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null '
'-o PubkeyAuthentication=yes root@localhost \'{}\''
)
self.install_command = self.run_command.format('yum install -y {}')
else:
self.run_command = '{}'
def run(self, cmd):
print "Executing:", cmd
run_cmd = self.run_command.format(cmd)
return self.c.run(run_cmd)
def install(self, package):
run_cmd = self.install_command.format(package)
print "Executing:", run_cmd
return self.c.run(run_cmd)
#Fake RemoteClient
class FakeRemote:
"""Provides fake remote class with the same run() function.
"""
def run(self, cmd):
"""This method executes cmd as a sub-process.
Args:
cmd (string): commands to run locally
Returns:
Standard input, output and error of command
"""
print cmd
cin, cout, cerr = os.popen3(cmd)
return '', cout.read(), cerr.read()
def put_file(self, filename, filecontent):
with open(filename, 'w') as f:
f.write(filecontent)
def exists(self, path):
return os.path.exists(path)
def rename(self, oldname, newname):
os.rename(oldname, newname)
def get_file(self, filename):
return True, open(filename)
class ChangeGluuHostname:
def __init__(self, old_host, new_host, cert_city, cert_mail, cert_state,
cert_country, ldap_password, os_type, ip_address, server='localhost',
gluu_version='',
local=True):
self.old_host = old_host
self.new_host = new_host
self.ip_address = ip_address
self.cert_city = cert_city
self.cert_mail = cert_mail
self.cert_state = cert_state
self.cert_country = cert_country
self.server = server
self.ldap_password = ldap_password
self.os_type = os_type
self.gluu_version = gluu_version
self.local = local
self.base_inum = None
self.appliance_inum = None
def startup(self):
if self.local:
ldap_host = 'localhost'
else:
ldap_host = self.server
ldap_server = Server("ldaps://{}:1636".format(ldap_host), use_ssl=True)
self.conn = Connection(ldap_server, user="cn=directory manager", password=self.ldap_password)
r = self.conn.bind()
if not r:
print "Can't conect to LDAP Server"
return False
self.container = '/opt/gluu-server-{}'.format(self.gluu_version)
if not self.local:
print "NOT LOCAL?"
sys.path.append("..")
from clustermgr.core.remote import RemoteClient
self.c = RemoteClient(self.server)
self.c.startup()
else:
self.c = FakeRemote()
if os.path.exists('/etc/gluu/conf/ox-ldap.properties'):
self.container = '/'
self.c.fake_remote = True
self.installer = Installer(self.c, self.gluu_version, self.os_type)
self.appliance_inum = self.get_appliance_inum()
self.base_inum = self.get_base_inum()
return True
def get_appliance_inum(self):
self.conn.search(search_base='ou=appliances,o=gluu',
search_filter='(objectclass=*)',
search_scope=SUBTREE, attributes=['inum'])
for r in self.conn.response:
if r['attributes']['inum']:
return r['attributes']['inum'][0]
def get_base_inum(self):
self.conn.search(search_base='o=gluu',
search_filter='(objectclass=gluuOrganization)',
search_scope=SUBTREE, attributes=['o'])
for r in self.conn.response:
if r['attributes']['o']:
return r['attributes']['o'][0]
def change_appliance_config(self):
print "Changing LDAP Applience configurations"
config_dn = 'ou=configuration,inum={},ou=appliances,o=gluu'.format(
self.appliance_inum)
for dns, cattr in (
('', 'oxIDPAuthentication'),
('oxauth', 'oxAuthConfDynamic'),
('oxidp', 'oxConfApplication'),
('oxtrust', 'oxTrustConfApplication'),
):
if dns:
dn = 'ou={},{}'.format(dns, config_dn)
else:
dn = 'inum={},ou=appliances,o=gluu'.format(self.appliance_inum)
self.conn.search(search_base=dn,
search_filter='(objectClass=*)',
search_scope=BASE, attributes=[cattr])
config_data = json.loads(self.conn.response[0]['attributes'][cattr][0])
for k in config_data:
kVal = config_data[k]
if type(kVal) == type(u''):
if self.old_host in kVal:
kVal=kVal.replace(self.old_host, self.new_host)
config_data[k]=kVal
config_data = json.dumps(config_data)
self.conn.modify(dn, {cattr: [MODIFY_REPLACE, config_data]})
def change_clients(self):
print "Changing LDAP Clients configurations"
dn = "ou=clients,o={},o=gluu".format(self.base_inum)
self.conn.search(search_base=dn,
search_filter='(objectClass=oxAuthClient)',
search_scope=SUBTREE, attributes=[
'oxAuthPostLogoutRedirectURI',
'oxAuthRedirectURI',
'oxClaimRedirectURI',
])
result = self.conn.response[0]['attributes']
dn = self.conn.response[0]['dn']
for atr in result:
for i in range(len(result[atr])):
changeAttr = False
if self.old_host in result[atr][i]:
changeAttr = True
result[atr][i] = result[atr][i].replace(self.old_host, self.new_host)
self.conn.modify(dn, {atr: [MODIFY_REPLACE, result[atr]]})
def change_uma(self):
print "Changing LDAP UMA Configurations"
for ou, cattr in (
('resources','oxResource'),
('scopes', 'oxId'),
):
dn = "ou={},ou=uma,o={},o=gluu".format(ou, self.base_inum)
self.conn.search(search_base=dn, search_filter='(objectClass=*)', search_scope=SUBTREE, attributes=[cattr])
result = self.conn.response
for r in result:
for i in range(len( r['attributes'][cattr])):
changeAttr = False
if self.old_host in r['attributes'][cattr][i]:
r['attributes'][cattr][i] = r['attributes'][cattr][i].replace(self.old_host, self.new_host)
self.conn.modify(r['dn'], {cattr: [MODIFY_REPLACE, r['attributes'][cattr]]})
def change_httpd_conf(self):
print "Changing httpd configurations"
if 'CentOS' in self.os_type:
httpd_conf = os.path.join(self.container, 'etc/httpd/conf/httpd.conf')
https_gluu = os.path.join(self.container, 'etc/httpd/conf.d/https_gluu.conf')
conf_files = [httpd_conf, https_gluu]
elif 'Ubuntu' in self.os_type:
https_gluu = os.path.join(self.container, 'etc/apache2/sites-available/https_gluu.conf')
conf_files = [https_gluu]
for conf_file in conf_files:
result, fileObj = self.c.get_file(conf_file)
if result:
config_text = fileObj.read()
config_text = config_text.replace(self.old_host, self.new_host)
self.c.put_file(conf_file, config_text)
def create_new_certs(self):
print "Backing up certificates"
cmd_list = [
'mkdir /etc/certs/backup',
'cp /etc/certs/* /etc/certs/backup'
]
for cmd in cmd_list:
print self.installer.run(cmd)
print "Creating certificates"
cmd_list = [
'/usr/bin/openssl genrsa -des3 -out /etc/certs/{0}.key.orig -passout pass:secret 2048',
'/usr/bin/openssl rsa -in /etc/certs/{0}.key.orig -passin pass:secret -out /etc/certs/{0}.key',
'/usr/bin/openssl req -new -key /etc/certs/{0}.key -out /etc/certs/{0}.csr -subj '
'"/C={4}/ST={5}/L={1}/O=Gluu/CN={2}/emailAddress={3}"'.format('{0}', self.cert_city, self.new_host, self.cert_mail, self.cert_country, self.cert_state),
'/usr/bin/openssl x509 -req -days 365 -in /etc/certs/{0}.csr -signkey /etc/certs/{0}.key -out /etc/certs/{0}.crt',
'chmod 440 -R /etc/certs',
'chown root:gluu -R /etc/certs/',
'chown jetty:jetty /etc/certs/oxauth-keys*'
]
cert_list = ['httpd', 'idp-encryption', 'idp-signing', 'shibIDP', 'opendj', 'passport-sp']
for crt in cert_list:
for cmd in cmd_list:
cmd = cmd.format(crt)
print self.installer.run(cmd)
if not crt == 'saml.pem':
del_key = ( '/opt/jre/bin/keytool -delete -alias {}_{} -keystore '
'/opt/jre/jre/lib/security/cacerts -storepass changeit').format(self.old_host, crt)
r = self.installer.run(del_key)
#if r[1]:
# print "Info:", r[1]
#if r[2]:
# print "** ERROR:", r[2]
add_key = ('/opt/jre/bin/keytool -import -trustcacerts -alias '
'{0}_{1} -file /etc/certs/{2}.crt -keystore '
'/opt/jre/jre/lib/security/cacerts -storepass changeit -noprompt').format(self.new_host, crt, crt)
r = self.installer.run(add_key)
#if r[1]:
# print "Info:", r[1]
#if r[2]:
# print "** ERROR:", r[2]
self.installer.run('chown jetty:jetty /etc/certs/oxauth-keys.*')
def modify_saml_passport(self):
print "Modifying SAML & Passport if installed"
files = [
'/opt/gluu-server-{0}/opt/shibboleth-idp/conf/idp.properties'.format(self.gluu_version),
'/opt/gluu-server-{0}/etc/gluu/conf/passport-config.json'.format(self.gluu_version),
]
for fn in files:
if self.c.exists(fn):
print "Modifying Shibboleth {0}".format(fn)
r = self.c.get_file(fn)
if r[0]:
f = r[1].read()
f = f.replace(self.old_host, self.new_host)
print self.c.put_file(fn, f)
def change_host_name(self):
print "Changing hostname"
hostname_file = os.path.join(self.container, 'etc/hostname')
print self.c.put_file(hostname_file, self.new_host)
def modify_etc_hosts(self):
print "Modifying /etc/hosts"
hosts_file = os.path.join(self.container, 'etc/hosts')
r = self.c.get_file(hosts_file)
if r[0]:
old_hosts = r[1]
news_hosts = modify_etc_hosts([(self.new_host, self.ip_address)], old_hosts, self.old_host)
print self.c.put_file(hosts_file, news_hosts)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-old', required=True, help="Old hostanme")
parser.add_argument('-new', required=True, help="New hostname")
parser.add_argument('-server', required=True, help="Hostname or IP for LDAP and ssh")
parser.add_argument('-mail', required=True, help="Email of admin")
parser.add_argument('-city', required=True, help="City for creating certificates")
parser.add_argument('-state', required=True, help="State for creating certificates")
parser.add_argument('-country', required=True, help="Country for creating certificates")
parser.add_argument('-password', required=True, help="Admin password")
parser.add_argument('-os', required=True, help="OS type: CentOS, Ubuntu", choices=['CentOS','Ubuntu'])
args = parser.parse_args()
name_changer = ChangeGluuHostname(
old_host=args.old,
new_host=args.new,
cert_city=args.city,
cert_mail=args.mail,
cert_state=args.state,
cert_country=args.country,
server=args.server,
ldap_password=args.password,
os_type=args.os
)
name_changer.startup()
name_changer.change_appliance_config()
name_changer.change_clients()
name_changer.change_uma()
name_changer.change_httpd_conf()
name_changer.create_new_certs()
name_changer.change_host_name()
name_changer.modify_saml_passport()
| 35.683841
| 164
| 0.550305
|
64e759394a9b99bf20e661245f0920366cebe3d9
| 3,620
|
py
|
Python
|
fase.py
|
mrcloma/pythonbirds
|
6368a5c1d4f0085b242404d6669cca5156430e9d
|
[
"MIT"
] | null | null | null |
fase.py
|
mrcloma/pythonbirds
|
6368a5c1d4f0085b242404d6669cca5156430e9d
|
[
"MIT"
] | null | null | null |
fase.py
|
mrcloma/pythonbirds
|
6368a5c1d4f0085b242404d6669cca5156430e9d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from itertools import chain
from atores import ATIVO
VITORIA = 'VITORIA'
DERROTA = 'DERROTA'
EM_ANDAMENTO = 'EM_ANDAMENTO'
class Ponto():
def __init__(self, x, y, caracter):
self.caracter = caracter
self.x = round(x)
self.y = round(y)
def __eq__(self, other):
return self.x == other.x and self.y == other.y and self.caracter == other.caracter
def __hash__(self):
return hash(self.x) ^ hash(self.y)
def __repr__(self, *args, **kwargs):
return "Ponto(%s,%s,'%s')" % (self.x, self.y, self.caracter)
class Fase():
def __init__(self, intervalo_de_colisao=1):
"""
Método que inicializa uma fase.
:param intervalo_de_colisao:
"""
self.intervalo_de_colisao = intervalo_de_colisao
self._passaros = []
self._porcos = []
self._obstaculos = []
def adicionar_obstaculo(self, *obstaculos):
"""
Adiciona obstáculos em uma fase
:param obstaculos:
"""
self._obstaculos.extend(obstaculos)
def adicionar_porco(self, *porcos):
"""
Adiciona porcos em uma fase
:param porcos:
"""
self._porcos.extend(porcos)
def adicionar_passaro(self, *passaros):
"""
Adiciona pássaros em uma fase
:param passaros:
"""
self._passaros.extend(passaros)
def status(self):
"""
Método que indica com mensagem o status do jogo
Se o jogo está em andamento (ainda tem porco ativo e pássaro ativo), retorna essa mensagem.
Se o jogo acabou com derrota (ainda existe porco ativo), retorna essa mensagem
Se o jogo acabou com vitória (não existe porco ativo), retorna essa mensagem
:return:
"""
if not self._possui_porco_ativo():
return VITORIA
elif self._possui_porco_ativo() and self._possui_passaros_ativos():
return EM_ANDAMENTO
else:
return DERROTA
def lancar(self, angulo, tempo):
"""
Método que executa lógica de lançamento.
Deve escolher o primeiro pássaro não lançado da lista e chamar seu método lançar
Se não houver esse tipo de pássaro, não deve fazer nada
:param angulo: ângulo de lançamento
:param tempo: Tempo de lançamento
"""
for passaro in self._passaros:
if not passaro.foi_lancado():
passaro.lancar(angulo, tempo)
break
def calcular_pontos(self, tempo):
"""
Lógica que retorna os pontos a serem exibidos na tela.
Cada ator deve ser transformado em um Ponto.
:param tempo: tempo para o qual devem ser calculados os pontos
:return: objeto do tipo Ponto
"""
for passaro in self._passaros:
passaro.calcular_posicao(tempo)
for alvo in self._obstaculos + self._porcos:
passaro.colidir(alvo, self.intervalo_de_colisao)
passaro.colidir_com_chao()
        pontos = [self._transformar_em_ponto(a) for a in self._passaros + self._obstaculos + self._porcos]
return pontos
def _transformar_em_ponto(self, ator):
return Ponto(ator.x, ator.y, ator.caracter())
def _possui_porco_ativo(self):
for porco in self._porcos:
if porco.status == ATIVO:
return True
return False
def _possui_passaros_ativos(self):
for passaro in self._passaros:
if passaro.status == ATIVO:
return True
return False
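# A minimal sketch of the status logic described in the docstrings above (not part of
# the original file): any object exposing a `status` attribute is enough to drive
# Fase.status(), so the stand-in actor below is purely hypothetical.
if __name__ == '__main__':
    class _AtorFake(object):
        def __init__(self, status):
            self.status = status

    fase_demo = Fase()
    fase_demo.adicionar_porco(_AtorFake(ATIVO))
    fase_demo.adicionar_passaro(_AtorFake(ATIVO))
    print(fase_demo.status())  # EM_ANDAMENTO: an active pig and an active bird remain
    fase_demo._porcos[0].status = None  # anything other than ATIVO counts as destroyed here
    print(fase_demo.status())  # VITORIA: no active pig remains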
| 27.014925
| 100
| 0.604696
|
40b982f3d5b999a926d08e4b28b34f73678bf671
| 685
|
py
|
Python
|
create_new_blog.py
|
gcrth/gcrth.github.io
|
061e4e9ee77f106e41d97b0cde8fb595a290884b
|
[
"Apache-2.0"
] | null | null | null |
create_new_blog.py
|
gcrth/gcrth.github.io
|
061e4e9ee77f106e41d97b0cde8fb595a290884b
|
[
"Apache-2.0"
] | null | null | null |
create_new_blog.py
|
gcrth/gcrth.github.io
|
061e4e9ee77f106e41d97b0cde8fb595a290884b
|
[
"Apache-2.0"
] | null | null | null |
import time
import os
path_to_blog='_posts/'
time_short=time.strftime("%Y-%m-%d", time.localtime())
time_long=time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
# change here
blog_name_short='Replace bash'
blog_name=time_short+'-'+blog_name_short+'.markdown'
if not os.path.exists(path_to_blog):
    raise ValueError('post directory "{}" does not exist'.format(path_to_blog))
if os.path.exists(path_to_blog + blog_name):
    raise ValueError('post "{}" already exists'.format(blog_name))
file=open(path_to_blog+blog_name,'w')
file.write('''---
layout: post
title: "'''+blog_name_short+'''"
subtitle: ""
date: '''+time_long+'''
author: "gcrth"
header-img: "img/post-bg-2015.jpg"
catalog: true
---
''')
os.makedirs('img/in-post/'+time_short+'-'+blog_name_short)
| 21.40625
| 62
| 0.681752
|
5e5d3b3f54ed60c789ed9e9358942054713c09da
| 787
|
py
|
Python
|
problems/dp/Solution5559.py
|
akalu/cs-problems-python
|
9b1bd8e3932be62135a38a77f955ded9a766b654
|
[
"MIT"
] | null | null | null |
problems/dp/Solution5559.py
|
akalu/cs-problems-python
|
9b1bd8e3932be62135a38a77f955ded9a766b654
|
[
"MIT"
] | null | null | null |
problems/dp/Solution5559.py
|
akalu/cs-problems-python
|
9b1bd8e3932be62135a38a77f955ded9a766b654
|
[
"MIT"
] | null | null | null |
""" You may recall that an array arr is a mountain array if and only if:
arr.length >= 3 There exists some index i (0-indexed) with 0 < i < arr.length
- 1 such that: arr[0] < arr[1] < ... < arr[i - 1] < arr[i] arr[i] > arr[i +
1] > ... > arr[arr.length - 1] Given an integer array nums, return the
minimum number of elements to remove to make nums a mountain array.
Example 1:
Input: nums = [1,3,1] Output: 0
Explanation: The array itself is a mountain
array so we do not need to remove any elements. Example 2:
Input: nums = [2,1,1,5,6,2,3,1] Output: 3
Explanation: One solution is to
remove the elements at indices 0, 1, and 5, making the array nums =
[1,5,6,3,1].
"""
class Solution5559:
pass
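# A minimal sketch of one possible solution (not part of the original file):
# treat every index i as a candidate peak and combine the longest strictly
# increasing subsequence ending at i with the longest strictly decreasing
# subsequence starting at i; everything outside that mountain is removed.
def minimum_mountain_removals(nums):
    n = len(nums)
    lis = [1] * n  # lis[i]: longest strictly increasing subsequence ending at i
    for i in range(n):
        for j in range(i):
            if nums[j] < nums[i]:
                lis[i] = max(lis[i], lis[j] + 1)
    lds = [1] * n  # lds[i]: longest strictly decreasing subsequence starting at i
    for i in range(n - 1, -1, -1):
        for j in range(i + 1, n):
            if nums[j] < nums[i]:
                lds[i] = max(lds[i], lds[j] + 1)
    best = 0
    for i in range(n):
        if lis[i] >= 2 and lds[i] >= 2:  # index i can serve as a peak
            best = max(best, lis[i] + lds[i] - 1)
    return n - best
# minimum_mountain_removals([1, 3, 1]) -> 0
# minimum_mountain_removals([2, 1, 1, 5, 6, 2, 3, 1]) -> 3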
| 28.107143
| 80
| 0.604828
|
4fbe3e1b5ab2df2ed81c7d9dce67a77a4c94d95a
| 2,387
|
py
|
Python
|
saleor/graphql/product/tests/benchmark/test_collection.py
|
codefl/saleor
|
96613dfb04fd73c19d6793ad00d37f07bb376c3a
|
[
"CC-BY-4.0"
] | null | null | null |
saleor/graphql/product/tests/benchmark/test_collection.py
|
codefl/saleor
|
96613dfb04fd73c19d6793ad00d37f07bb376c3a
|
[
"CC-BY-4.0"
] | 12
|
2021-03-30T14:37:10.000Z
|
2022-03-12T00:58:16.000Z
|
saleor/graphql/product/tests/benchmark/test_collection.py
|
codefl/saleor
|
96613dfb04fd73c19d6793ad00d37f07bb376c3a
|
[
"CC-BY-4.0"
] | null | null | null |
import graphene
import pytest
from saleor.graphql.tests.utils import get_graphql_content
@pytest.mark.django_db
@pytest.mark.count_queries(autouse=False)
def test_collection_view(api_client, homepage_collection, count_queries):
query = """
fragment BasicProductFields on Product {
id
name
thumbnail {
url
alt
}
thumbnail2x: thumbnail(size: 510) {
url
}
}
fragment Price on TaxedMoney {
gross {
amount
currency
}
net {
amount
currency
}
}
fragment ProductPricingField on Product {
pricing {
onSale
priceRangeUndiscounted {
start {
...Price
}
stop {
...Price
}
}
priceRange {
start {
...Price
}
stop {
...Price
}
}
}
}
query Collection($id: ID!, $pageSize: Int) {
collection(id: $id) {
id
slug
name
seoDescription
seoTitle
backgroundImage {
url
}
}
products(first: $pageSize, filter: {collections: [$id]}) {
totalCount
edges {
node {
...BasicProductFields
...ProductPricingField
category {
id
name
}
}
}
pageInfo {
endCursor
hasNextPage
hasPreviousPage
startCursor
}
}
attributes(filter: {inCollection: $id}, first: 100) {
edges {
node {
id
name
slug
values {
id
name
slug
}
}
}
}
}
"""
variables = {
"pageSize": 100,
"id": graphene.Node.to_global_id("Collection", homepage_collection.pk),
}
get_graphql_content(api_client.post_graphql(query, variables))
| 22.308411
| 79
| 0.390029
|
36b0e05b43d81a7bd95ee4bc3c1a31f252c9b3ad
| 5,754
|
py
|
Python
|
fresh_tomatoes.py
|
wustzhy/movies_web
|
f3ba0353defcdceba5860e9ebc2f49224a2ee675
|
[
"MIT"
] | null | null | null |
fresh_tomatoes.py
|
wustzhy/movies_web
|
f3ba0353defcdceba5860e9ebc2f49224a2ee675
|
[
"MIT"
] | null | null | null |
fresh_tomatoes.py
|
wustzhy/movies_web
|
f3ba0353defcdceba5860e9ebc2f49224a2ee675
|
[
"MIT"
] | null | null | null |
import webbrowser
import os
import re
# Styles and scripting for the page
main_page_head = '''
<head>
<meta charset="utf-8">
<title>Fresh Tomatoes!</title>
<!-- Bootstrap 3 -->
<link rel="stylesheet" href="https://netdna.bootstrapcdn.com/bootstrap/3.1.0/css/bootstrap.min.css">
<link rel="stylesheet" href="https://netdna.bootstrapcdn.com/bootstrap/3.1.0/css/bootstrap-theme.min.css">
<script src="http://code.jquery.com/jquery-1.10.1.min.js"></script>
<script src="https://netdna.bootstrapcdn.com/bootstrap/3.1.0/js/bootstrap.min.js"></script>
<style type="text/css" media="screen">
body {
padding-top: 80px;
}
#trailer .modal-dialog {
margin-top: 200px;
width: 640px;
height: 480px;
}
.hanging-close {
position: absolute;
top: -12px;
right: -12px;
z-index: 9001;
}
#trailer-video {
width: 100%;
height: 100%;
}
.movie-tile {
margin-bottom: 20px;
padding-top: 20px;
}
.movie-tile:hover {
background-color: #EEE;
cursor: pointer;
}
.scale-media {
padding-bottom: 56.25%;
position: relative;
}
.scale-media iframe {
border: none;
height: 100%;
position: absolute;
width: 100%;
left: 0;
top: 0;
background-color: white;
}
</style>
<script type="text/javascript" charset="utf-8">
// Pause the video when the modal is closed
$(document).on('click', '.hanging-close, .modal-backdrop, .modal', function (event) {
// Remove the src so the player itself gets removed, as this is the only
// reliable way to ensure the video stops playing in IE
$("#trailer-video-container").empty();
});
// Start playing the video whenever the trailer modal is opened
$(document).on('click', '.movie-tile', function (event) {
var trailerSrc = $(this).attr('data-trailer_src')
var sourceUrl = trailerSrc + '?autoplay=1&html5=1';
$("#trailer-video-container").empty().append($("<iframe></iframe>", {
'id': 'trailer-video',
'type': 'text-html',
'src': sourceUrl,
'frameborder': 0
}));
});
// Animate in the movies when the page loads
$(document).ready(function () {
$('.movie-tile').hide().first().show("fast", function showNext() {
$(this).next("div").show("fast", showNext);
});
});
</script>
</head>
'''
# The main page layout and title bar
main_page_content = '''
<!DOCTYPE html>
<html lang="en">
<body>
<!-- Trailer Video Modal -->
<div class="modal" id="trailer">
<div class="modal-dialog">
<div class="modal-content">
<a href="#" class="hanging-close" data-dismiss="modal" aria-hidden="true">
<img src="https://lh5.ggpht.com/v4-628SilF0HtHuHdu5EzxD7WRqOrrTIDi_MhEG6_qkNtUK5Wg7KPkofp_VJoF7RS2LhxwEFCO1ICHZlc-o_=s0#w=24&h=24"/>
</a>
<div class="scale-media" id="trailer-video-container">
</div>
</div>
</div>
</div>
<!-- Main Page Content -->
<div class="container">
<div class="navbar navbar-inverse navbar-fixed-top" role="navigation">
<div class="container">
<div class="navbar-header">
<a class="navbar-brand" href="#">Fresh Tomatoes Movie Trailers</a>
</div>
</div>
</div>
</div>
<div class="container">
{movie_tiles}
</div>
</body>
</html>
'''
# A single movie entry html template
movie_tile_content = '''
<div class="col-md-6 col-lg-4 movie-tile text-center" data-trailer_src="{trailer_src}" data-toggle="modal" data-target="#trailer">
<img src="{poster_image_url}" width="220" height="342">
<h2>{movie_title}</h2>
</div>
'''
def create_movie_tiles_content(movies):
# The HTML content for this section of the page
content = ''
for movie in movies:
# Extract the youku or youtube ID from the url
trailer_url = movie.trailer_url
youku_id_match = re.search(r'(id_)(\w+=*)(\.html)', trailer_url)
youtube_id_match = re.search(r'(?<=v=)[^&#]+', trailer_url)
youtube_id_match = youtube_id_match or re.search(r'(?<=be/)[^&#]+', trailer_url)
trailer_youku_id = youku_id_match.group(2) if youku_id_match else None
trailer_youtube_id = youtube_id_match.group(0) if youtube_id_match else None
        if trailer_youku_id is not None:
            trailer_src = 'http://player.youku.com/embed/' + trailer_youku_id
        else:
            trailer_src = 'http://youtube.com/embed/' + trailer_youtube_id
# Append the tile for the movie with its content filled in
content += movie_tile_content.format(
movie_title=movie.title,
poster_image_url=movie.poster_image_url,
trailer_src=trailer_src
)
return content
def open_movies_page(movies):
# Create or overwrite the output file
output_file = open('fresh_tomatoes.html', 'w')
# Replace the placeholder for the movie tiles with the actual dynamically generated content
rendered_content = main_page_content.format(movie_tiles=create_movie_tiles_content(movies))
# Output the file
output_file.write(main_page_head + rendered_content)
output_file.close()
# open the output file in the browser
url = os.path.abspath(output_file.name)
webbrowser.open('file://' + url, new=2) # open in a new tab, if possible
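# A minimal usage sketch (not part of the original module): the functions above only
# assume movie objects exposing title, poster_image_url and trailer_url attributes,
# so the hypothetical container and placeholder URLs below are enough to render a page.
if __name__ == '__main__':
    class _Movie(object):
        def __init__(self, title, poster_image_url, trailer_url):
            self.title = title
            self.poster_image_url = poster_image_url
            self.trailer_url = trailer_url

    demo_movie = _Movie('Example Movie',
                        'http://example.com/poster.jpg',  # placeholder poster URL
                        'https://www.youtube.com/watch?v=XXXXXXXXXXX')  # placeholder trailer URL
    open_movies_page([demo_movie])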
| 34.662651
| 144
| 0.586548
|
cadfc9c22ac3cc44d3559309e30b0092c903daa4
| 151
|
py
|
Python
|
mach_cad/tools/magnet/document/__init__.py
|
Severson-Group/MachEval
|
dbb7999188133f8744636da53cab475ae538ce80
|
[
"BSD-3-Clause"
] | 6
|
2021-11-02T20:12:32.000Z
|
2021-11-13T10:50:35.000Z
|
mach_cad/tools/magnet/document/__init__.py
|
Severson-Group/MachEval
|
dbb7999188133f8744636da53cab475ae538ce80
|
[
"BSD-3-Clause"
] | 18
|
2021-11-29T20:14:55.000Z
|
2022-03-02T07:17:37.000Z
|
mach_cad/tools/magnet/document/__init__.py
|
Severson-Group/MachEval
|
dbb7999188133f8744636da53cab475ae538ce80
|
[
"BSD-3-Clause"
] | 1
|
2022-01-29T00:52:38.000Z
|
2022-01-29T00:52:38.000Z
|
from . import document
from . import view
from .document import *
from .view import *
__all__ = []
__all__ += document.__all__
__all__ += view.__all__
| 15.1
| 27
| 0.735099
|
2c1ef94fea31e281b3ecc57cf520c40a444da282
| 154
|
py
|
Python
|
phylopandas/treeio/__init__.py
|
harmsm/phylopandas
|
eb1e5efbbfe68f96b497816c923bc333489c4838
|
[
"BSD-3-Clause"
] | 57
|
2017-10-31T20:20:11.000Z
|
2022-03-09T14:27:12.000Z
|
phylopandas/treeio/__init__.py
|
harmsm/phylopandas
|
eb1e5efbbfe68f96b497816c923bc333489c4838
|
[
"BSD-3-Clause"
] | 19
|
2017-10-29T18:15:43.000Z
|
2021-09-24T12:59:00.000Z
|
phylopandas/treeio/__init__.py
|
harmsm/phylopandas
|
eb1e5efbbfe68f96b497816c923bc333489c4838
|
[
"BSD-3-Clause"
] | 22
|
2017-10-27T22:35:39.000Z
|
2022-02-18T07:50:24.000Z
|
from .read import (read_nexml,
read_nexus_tree,
read_newick,
read_dendropy)
from . import write
| 22
| 35
| 0.506494
|
e0b239df447395ed6dcac06fa37af6b7b34ac0ec
| 167,040
|
py
|
Python
|
nebula2/common/ttypes.py
|
Shylock-Hg/nebula-python
|
f17120b77adb6dd00aeb52de1abf783fcb9b4465
|
[
"Apache-2.0"
] | null | null | null |
nebula2/common/ttypes.py
|
Shylock-Hg/nebula-python
|
f17120b77adb6dd00aeb52de1abf783fcb9b4465
|
[
"Apache-2.0"
] | null | null | null |
nebula2/common/ttypes.py
|
Shylock-Hg/nebula-python
|
f17120b77adb6dd00aeb52de1abf783fcb9b4465
|
[
"Apache-2.0"
] | null | null | null |
#
# Autogenerated by Thrift
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
# @generated
#
from __future__ import absolute_import
import six
import sys
from nebula2.fbthrift.util.Recursive import fix_spec
from nebula2.fbthrift.Thrift import TType, TMessageType, TPriority, TRequestContext, TProcessorEventHandler, TServerInterface, TProcessor, TException, TApplicationException, UnimplementedTypedef
from nebula2.fbthrift.protocol.TProtocol import TProtocolException
import pprint
import warnings
from nebula2.fbthrift import Thrift
from nebula2.fbthrift.transport import TTransport
from nebula2.fbthrift.protocol import TBinaryProtocol
from nebula2.fbthrift.protocol import TCompactProtocol
from nebula2.fbthrift.protocol import THeaderProtocol
fastproto = None
try:
from nebula2.fbthrift.protocol import fastproto
except ImportError:
pass
all_structs = []
UTF8STRINGS = bool(0) or sys.version_info.major >= 3
__all__ = ['UTF8STRINGS', 'NullType', 'PropertyType', 'ErrorCode', 'SchemaID', 'Date', 'Time', 'DateTime', 'Value', 'NList', 'NMap', 'NSet', 'Row', 'DataSet', 'Coordinate', 'Point', 'LineString', 'Polygon', 'Geography', 'Tag', 'Vertex', 'Edge', 'Step', 'Path', 'HostAddr', 'KeyValue', 'LogInfo', 'DirInfo', 'NodeInfo', 'PartitionBackupInfo', 'CheckpointInfo', 'LogEntry', 'ClusterID', 'GraphSpaceID', 'PartitionID', 'TagID', 'EdgeType', 'EdgeRanking', 'LogID', 'TermID', 'Timestamp', 'IndexID', 'Port', 'SessionID', 'ExecutionPlanID']
class NullType:
__NULL__ = 0
NaN = 1
BAD_DATA = 2
BAD_TYPE = 3
ERR_OVERFLOW = 4
UNKNOWN_PROP = 5
DIV_BY_ZERO = 6
OUT_OF_RANGE = 7
_VALUES_TO_NAMES = {
0: "__NULL__",
1: "NaN",
2: "BAD_DATA",
3: "BAD_TYPE",
4: "ERR_OVERFLOW",
5: "UNKNOWN_PROP",
6: "DIV_BY_ZERO",
7: "OUT_OF_RANGE",
}
_NAMES_TO_VALUES = {
"__NULL__": 0,
"NaN": 1,
"BAD_DATA": 2,
"BAD_TYPE": 3,
"ERR_OVERFLOW": 4,
"UNKNOWN_PROP": 5,
"DIV_BY_ZERO": 6,
"OUT_OF_RANGE": 7,
}
class PropertyType:
UNKNOWN = 0
BOOL = 1
INT64 = 2
VID = 3
FLOAT = 4
DOUBLE = 5
STRING = 6
FIXED_STRING = 7
INT8 = 8
INT16 = 9
INT32 = 10
TIMESTAMP = 21
DATE = 24
DATETIME = 25
TIME = 26
GEOGRAPHY = 31
_VALUES_TO_NAMES = {
0: "UNKNOWN",
1: "BOOL",
2: "INT64",
3: "VID",
4: "FLOAT",
5: "DOUBLE",
6: "STRING",
7: "FIXED_STRING",
8: "INT8",
9: "INT16",
10: "INT32",
21: "TIMESTAMP",
24: "DATE",
25: "DATETIME",
26: "TIME",
31: "GEOGRAPHY",
}
_NAMES_TO_VALUES = {
"UNKNOWN": 0,
"BOOL": 1,
"INT64": 2,
"VID": 3,
"FLOAT": 4,
"DOUBLE": 5,
"STRING": 6,
"FIXED_STRING": 7,
"INT8": 8,
"INT16": 9,
"INT32": 10,
"TIMESTAMP": 21,
"DATE": 24,
"DATETIME": 25,
"TIME": 26,
"GEOGRAPHY": 31,
}
class ErrorCode:
SUCCEEDED = 0
E_DISCONNECTED = -1
E_FAIL_TO_CONNECT = -2
E_RPC_FAILURE = -3
E_LEADER_CHANGED = -4
E_SPACE_NOT_FOUND = -5
E_TAG_NOT_FOUND = -6
E_EDGE_NOT_FOUND = -7
E_INDEX_NOT_FOUND = -8
E_EDGE_PROP_NOT_FOUND = -9
E_TAG_PROP_NOT_FOUND = -10
E_ROLE_NOT_FOUND = -11
E_CONFIG_NOT_FOUND = -12
E_GROUP_NOT_FOUND = -13
E_ZONE_NOT_FOUND = -14
E_LISTENER_NOT_FOUND = -15
E_PART_NOT_FOUND = -16
E_KEY_NOT_FOUND = -17
E_USER_NOT_FOUND = -18
E_STATS_NOT_FOUND = -19
E_BACKUP_FAILED = -24
E_BACKUP_EMPTY_TABLE = -25
E_BACKUP_TABLE_FAILED = -26
E_PARTIAL_RESULT = -27
E_REBUILD_INDEX_FAILED = -28
E_INVALID_PASSWORD = -29
E_FAILED_GET_ABS_PATH = -30
E_BAD_USERNAME_PASSWORD = -1001
E_SESSION_INVALID = -1002
E_SESSION_TIMEOUT = -1003
E_SYNTAX_ERROR = -1004
E_EXECUTION_ERROR = -1005
E_STATEMENT_EMPTY = -1006
E_BAD_PERMISSION = -1008
E_SEMANTIC_ERROR = -1009
E_TOO_MANY_CONNECTIONS = -1010
E_PARTIAL_SUCCEEDED = -1011
E_NO_HOSTS = -2001
E_EXISTED = -2002
E_INVALID_HOST = -2003
E_UNSUPPORTED = -2004
E_NOT_DROP = -2005
E_BALANCER_RUNNING = -2006
E_CONFIG_IMMUTABLE = -2007
E_CONFLICT = -2008
E_INVALID_PARM = -2009
E_WRONGCLUSTER = -2010
E_STORE_FAILURE = -2021
E_STORE_SEGMENT_ILLEGAL = -2022
E_BAD_BALANCE_PLAN = -2023
E_BALANCED = -2024
E_NO_RUNNING_BALANCE_PLAN = -2025
E_NO_VALID_HOST = -2026
E_CORRUPTED_BALANCE_PLAN = -2027
E_NO_INVALID_BALANCE_PLAN = -2028
E_IMPROPER_ROLE = -2030
E_INVALID_PARTITION_NUM = -2031
E_INVALID_REPLICA_FACTOR = -2032
E_INVALID_CHARSET = -2033
E_INVALID_COLLATE = -2034
E_CHARSET_COLLATE_NOT_MATCH = -2035
E_SNAPSHOT_FAILURE = -2040
E_BLOCK_WRITE_FAILURE = -2041
E_REBUILD_INDEX_FAILURE = -2042
E_INDEX_WITH_TTL = -2043
E_ADD_JOB_FAILURE = -2044
E_STOP_JOB_FAILURE = -2045
E_SAVE_JOB_FAILURE = -2046
E_BALANCER_FAILURE = -2047
E_JOB_NOT_FINISHED = -2048
E_TASK_REPORT_OUT_DATE = -2049
E_JOB_NOT_IN_SPACE = -2050
E_INVALID_JOB = -2065
E_BACKUP_BUILDING_INDEX = -2066
E_BACKUP_SPACE_NOT_FOUND = -2067
E_RESTORE_FAILURE = -2068
E_SESSION_NOT_FOUND = -2069
E_LIST_CLUSTER_FAILURE = -2070
E_LIST_CLUSTER_GET_ABS_PATH_FAILURE = -2071
E_GET_META_DIR_FAILURE = -2072
E_QUERY_NOT_FOUND = -2073
E_CONSENSUS_ERROR = -3001
E_KEY_HAS_EXISTS = -3002
E_DATA_TYPE_MISMATCH = -3003
E_INVALID_FIELD_VALUE = -3004
E_INVALID_OPERATION = -3005
E_NOT_NULLABLE = -3006
E_FIELD_UNSET = -3007
E_OUT_OF_RANGE = -3008
E_ATOMIC_OP_FAILED = -3009
E_DATA_CONFLICT_ERROR = -3010
E_WRITE_STALLED = -3011
E_IMPROPER_DATA_TYPE = -3021
E_INVALID_SPACEVIDLEN = -3022
E_INVALID_FILTER = -3031
E_INVALID_UPDATER = -3032
E_INVALID_STORE = -3033
E_INVALID_PEER = -3034
E_RETRY_EXHAUSTED = -3035
E_TRANSFER_LEADER_FAILED = -3036
E_INVALID_STAT_TYPE = -3037
E_INVALID_VID = -3038
E_NO_TRANSFORMED = -3039
E_LOAD_META_FAILED = -3040
E_FAILED_TO_CHECKPOINT = -3041
E_CHECKPOINT_BLOCKED = -3042
E_FILTER_OUT = -3043
E_INVALID_DATA = -3044
E_MUTATE_EDGE_CONFLICT = -3045
E_MUTATE_TAG_CONFLICT = -3046
E_OUTDATED_LOCK = -3047
E_INVALID_TASK_PARA = -3051
E_USER_CANCEL = -3052
E_TASK_EXECUTION_FAILED = -3053
E_PLAN_IS_KILLED = -3060
E_NO_TERM = -3070
E_OUTDATED_TERM = -3071
E_OUTDATED_EDGE = -3072
E_WRITE_WRITE_CONFLICT = -3073
E_CLIENT_SERVER_INCOMPATIBLE = -3061
E_UNKNOWN = -8000
_VALUES_TO_NAMES = {
0: "SUCCEEDED",
-1: "E_DISCONNECTED",
-2: "E_FAIL_TO_CONNECT",
-3: "E_RPC_FAILURE",
-4: "E_LEADER_CHANGED",
-5: "E_SPACE_NOT_FOUND",
-6: "E_TAG_NOT_FOUND",
-7: "E_EDGE_NOT_FOUND",
-8: "E_INDEX_NOT_FOUND",
-9: "E_EDGE_PROP_NOT_FOUND",
-10: "E_TAG_PROP_NOT_FOUND",
-11: "E_ROLE_NOT_FOUND",
-12: "E_CONFIG_NOT_FOUND",
-13: "E_GROUP_NOT_FOUND",
-14: "E_ZONE_NOT_FOUND",
-15: "E_LISTENER_NOT_FOUND",
-16: "E_PART_NOT_FOUND",
-17: "E_KEY_NOT_FOUND",
-18: "E_USER_NOT_FOUND",
-19: "E_STATS_NOT_FOUND",
-24: "E_BACKUP_FAILED",
-25: "E_BACKUP_EMPTY_TABLE",
-26: "E_BACKUP_TABLE_FAILED",
-27: "E_PARTIAL_RESULT",
-28: "E_REBUILD_INDEX_FAILED",
-29: "E_INVALID_PASSWORD",
-30: "E_FAILED_GET_ABS_PATH",
-1001: "E_BAD_USERNAME_PASSWORD",
-1002: "E_SESSION_INVALID",
-1003: "E_SESSION_TIMEOUT",
-1004: "E_SYNTAX_ERROR",
-1005: "E_EXECUTION_ERROR",
-1006: "E_STATEMENT_EMPTY",
-1008: "E_BAD_PERMISSION",
-1009: "E_SEMANTIC_ERROR",
-1010: "E_TOO_MANY_CONNECTIONS",
-1011: "E_PARTIAL_SUCCEEDED",
-2001: "E_NO_HOSTS",
-2002: "E_EXISTED",
-2003: "E_INVALID_HOST",
-2004: "E_UNSUPPORTED",
-2005: "E_NOT_DROP",
-2006: "E_BALANCER_RUNNING",
-2007: "E_CONFIG_IMMUTABLE",
-2008: "E_CONFLICT",
-2009: "E_INVALID_PARM",
-2010: "E_WRONGCLUSTER",
-2021: "E_STORE_FAILURE",
-2022: "E_STORE_SEGMENT_ILLEGAL",
-2023: "E_BAD_BALANCE_PLAN",
-2024: "E_BALANCED",
-2025: "E_NO_RUNNING_BALANCE_PLAN",
-2026: "E_NO_VALID_HOST",
-2027: "E_CORRUPTED_BALANCE_PLAN",
-2028: "E_NO_INVALID_BALANCE_PLAN",
-2030: "E_IMPROPER_ROLE",
-2031: "E_INVALID_PARTITION_NUM",
-2032: "E_INVALID_REPLICA_FACTOR",
-2033: "E_INVALID_CHARSET",
-2034: "E_INVALID_COLLATE",
-2035: "E_CHARSET_COLLATE_NOT_MATCH",
-2040: "E_SNAPSHOT_FAILURE",
-2041: "E_BLOCK_WRITE_FAILURE",
-2042: "E_REBUILD_INDEX_FAILURE",
-2043: "E_INDEX_WITH_TTL",
-2044: "E_ADD_JOB_FAILURE",
-2045: "E_STOP_JOB_FAILURE",
-2046: "E_SAVE_JOB_FAILURE",
-2047: "E_BALANCER_FAILURE",
-2048: "E_JOB_NOT_FINISHED",
-2049: "E_TASK_REPORT_OUT_DATE",
-2050: "E_JOB_NOT_IN_SPACE",
-2065: "E_INVALID_JOB",
-2066: "E_BACKUP_BUILDING_INDEX",
-2067: "E_BACKUP_SPACE_NOT_FOUND",
-2068: "E_RESTORE_FAILURE",
-2069: "E_SESSION_NOT_FOUND",
-2070: "E_LIST_CLUSTER_FAILURE",
-2071: "E_LIST_CLUSTER_GET_ABS_PATH_FAILURE",
-2072: "E_GET_META_DIR_FAILURE",
-2073: "E_QUERY_NOT_FOUND",
-3001: "E_CONSENSUS_ERROR",
-3002: "E_KEY_HAS_EXISTS",
-3003: "E_DATA_TYPE_MISMATCH",
-3004: "E_INVALID_FIELD_VALUE",
-3005: "E_INVALID_OPERATION",
-3006: "E_NOT_NULLABLE",
-3007: "E_FIELD_UNSET",
-3008: "E_OUT_OF_RANGE",
-3009: "E_ATOMIC_OP_FAILED",
-3010: "E_DATA_CONFLICT_ERROR",
-3011: "E_WRITE_STALLED",
-3021: "E_IMPROPER_DATA_TYPE",
-3022: "E_INVALID_SPACEVIDLEN",
-3031: "E_INVALID_FILTER",
-3032: "E_INVALID_UPDATER",
-3033: "E_INVALID_STORE",
-3034: "E_INVALID_PEER",
-3035: "E_RETRY_EXHAUSTED",
-3036: "E_TRANSFER_LEADER_FAILED",
-3037: "E_INVALID_STAT_TYPE",
-3038: "E_INVALID_VID",
-3039: "E_NO_TRANSFORMED",
-3040: "E_LOAD_META_FAILED",
-3041: "E_FAILED_TO_CHECKPOINT",
-3042: "E_CHECKPOINT_BLOCKED",
-3043: "E_FILTER_OUT",
-3044: "E_INVALID_DATA",
-3045: "E_MUTATE_EDGE_CONFLICT",
-3046: "E_MUTATE_TAG_CONFLICT",
-3047: "E_OUTDATED_LOCK",
-3051: "E_INVALID_TASK_PARA",
-3052: "E_USER_CANCEL",
-3053: "E_TASK_EXECUTION_FAILED",
-3060: "E_PLAN_IS_KILLED",
-3070: "E_NO_TERM",
-3071: "E_OUTDATED_TERM",
-3072: "E_OUTDATED_EDGE",
-3073: "E_WRITE_WRITE_CONFLICT",
-3061: "E_CLIENT_SERVER_INCOMPATIBLE",
-8000: "E_UNKNOWN",
}
_NAMES_TO_VALUES = {
"SUCCEEDED": 0,
"E_DISCONNECTED": -1,
"E_FAIL_TO_CONNECT": -2,
"E_RPC_FAILURE": -3,
"E_LEADER_CHANGED": -4,
"E_SPACE_NOT_FOUND": -5,
"E_TAG_NOT_FOUND": -6,
"E_EDGE_NOT_FOUND": -7,
"E_INDEX_NOT_FOUND": -8,
"E_EDGE_PROP_NOT_FOUND": -9,
"E_TAG_PROP_NOT_FOUND": -10,
"E_ROLE_NOT_FOUND": -11,
"E_CONFIG_NOT_FOUND": -12,
"E_GROUP_NOT_FOUND": -13,
"E_ZONE_NOT_FOUND": -14,
"E_LISTENER_NOT_FOUND": -15,
"E_PART_NOT_FOUND": -16,
"E_KEY_NOT_FOUND": -17,
"E_USER_NOT_FOUND": -18,
"E_STATS_NOT_FOUND": -19,
"E_BACKUP_FAILED": -24,
"E_BACKUP_EMPTY_TABLE": -25,
"E_BACKUP_TABLE_FAILED": -26,
"E_PARTIAL_RESULT": -27,
"E_REBUILD_INDEX_FAILED": -28,
"E_INVALID_PASSWORD": -29,
"E_FAILED_GET_ABS_PATH": -30,
"E_BAD_USERNAME_PASSWORD": -1001,
"E_SESSION_INVALID": -1002,
"E_SESSION_TIMEOUT": -1003,
"E_SYNTAX_ERROR": -1004,
"E_EXECUTION_ERROR": -1005,
"E_STATEMENT_EMPTY": -1006,
"E_BAD_PERMISSION": -1008,
"E_SEMANTIC_ERROR": -1009,
"E_TOO_MANY_CONNECTIONS": -1010,
"E_PARTIAL_SUCCEEDED": -1011,
"E_NO_HOSTS": -2001,
"E_EXISTED": -2002,
"E_INVALID_HOST": -2003,
"E_UNSUPPORTED": -2004,
"E_NOT_DROP": -2005,
"E_BALANCER_RUNNING": -2006,
"E_CONFIG_IMMUTABLE": -2007,
"E_CONFLICT": -2008,
"E_INVALID_PARM": -2009,
"E_WRONGCLUSTER": -2010,
"E_STORE_FAILURE": -2021,
"E_STORE_SEGMENT_ILLEGAL": -2022,
"E_BAD_BALANCE_PLAN": -2023,
"E_BALANCED": -2024,
"E_NO_RUNNING_BALANCE_PLAN": -2025,
"E_NO_VALID_HOST": -2026,
"E_CORRUPTED_BALANCE_PLAN": -2027,
"E_NO_INVALID_BALANCE_PLAN": -2028,
"E_IMPROPER_ROLE": -2030,
"E_INVALID_PARTITION_NUM": -2031,
"E_INVALID_REPLICA_FACTOR": -2032,
"E_INVALID_CHARSET": -2033,
"E_INVALID_COLLATE": -2034,
"E_CHARSET_COLLATE_NOT_MATCH": -2035,
"E_SNAPSHOT_FAILURE": -2040,
"E_BLOCK_WRITE_FAILURE": -2041,
"E_REBUILD_INDEX_FAILURE": -2042,
"E_INDEX_WITH_TTL": -2043,
"E_ADD_JOB_FAILURE": -2044,
"E_STOP_JOB_FAILURE": -2045,
"E_SAVE_JOB_FAILURE": -2046,
"E_BALANCER_FAILURE": -2047,
"E_JOB_NOT_FINISHED": -2048,
"E_TASK_REPORT_OUT_DATE": -2049,
"E_JOB_NOT_IN_SPACE": -2050,
"E_INVALID_JOB": -2065,
"E_BACKUP_BUILDING_INDEX": -2066,
"E_BACKUP_SPACE_NOT_FOUND": -2067,
"E_RESTORE_FAILURE": -2068,
"E_SESSION_NOT_FOUND": -2069,
"E_LIST_CLUSTER_FAILURE": -2070,
"E_LIST_CLUSTER_GET_ABS_PATH_FAILURE": -2071,
"E_GET_META_DIR_FAILURE": -2072,
"E_QUERY_NOT_FOUND": -2073,
"E_CONSENSUS_ERROR": -3001,
"E_KEY_HAS_EXISTS": -3002,
"E_DATA_TYPE_MISMATCH": -3003,
"E_INVALID_FIELD_VALUE": -3004,
"E_INVALID_OPERATION": -3005,
"E_NOT_NULLABLE": -3006,
"E_FIELD_UNSET": -3007,
"E_OUT_OF_RANGE": -3008,
"E_ATOMIC_OP_FAILED": -3009,
"E_DATA_CONFLICT_ERROR": -3010,
"E_WRITE_STALLED": -3011,
"E_IMPROPER_DATA_TYPE": -3021,
"E_INVALID_SPACEVIDLEN": -3022,
"E_INVALID_FILTER": -3031,
"E_INVALID_UPDATER": -3032,
"E_INVALID_STORE": -3033,
"E_INVALID_PEER": -3034,
"E_RETRY_EXHAUSTED": -3035,
"E_TRANSFER_LEADER_FAILED": -3036,
"E_INVALID_STAT_TYPE": -3037,
"E_INVALID_VID": -3038,
"E_NO_TRANSFORMED": -3039,
"E_LOAD_META_FAILED": -3040,
"E_FAILED_TO_CHECKPOINT": -3041,
"E_CHECKPOINT_BLOCKED": -3042,
"E_FILTER_OUT": -3043,
"E_INVALID_DATA": -3044,
"E_MUTATE_EDGE_CONFLICT": -3045,
"E_MUTATE_TAG_CONFLICT": -3046,
"E_OUTDATED_LOCK": -3047,
"E_INVALID_TASK_PARA": -3051,
"E_USER_CANCEL": -3052,
"E_TASK_EXECUTION_FAILED": -3053,
"E_PLAN_IS_KILLED": -3060,
"E_NO_TERM": -3070,
"E_OUTDATED_TERM": -3071,
"E_OUTDATED_EDGE": -3072,
"E_WRITE_WRITE_CONFLICT": -3073,
"E_CLIENT_SERVER_INCOMPATIBLE": -3061,
"E_UNKNOWN": -8000,
}
class SchemaID(object):
"""
Attributes:
- tag_id
- edge_type
"""
thrift_spec = None
__init__ = None
__EMPTY__ = 0
TAG_ID = 1
EDGE_TYPE = 2
@staticmethod
def isUnion():
return True
def get_tag_id(self):
assert self.field == 1
return self.value
def get_edge_type(self):
assert self.field == 2
return self.value
def set_tag_id(self, value):
self.field = 1
self.value = value
def set_edge_type(self, value):
self.field = 2
self.value = value
def getType(self):
return self.field
def __repr__(self):
value = pprint.pformat(self.value)
member = ''
if self.field == 1:
padding = ' ' * 7
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('tag_id', value)
if self.field == 2:
padding = ' ' * 10
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('edge_type', value)
return "%s(%s)" % (self.__class__.__name__, member)
def read(self, iprot):
self.field = 0
self.value = None
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, True], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, True], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
tag_id = iprot.readI32()
assert self.field == 0 and self.value is None
self.set_tag_id(tag_id)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
edge_type = iprot.readI32()
assert self.field == 0 and self.value is None
self.set_edge_type(edge_type)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, True], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, True], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeUnionBegin('SchemaID')
if self.field == 1:
oprot.writeFieldBegin('tag_id', TType.I32, 1)
tag_id = self.value
oprot.writeI32(tag_id)
oprot.writeFieldEnd()
if self.field == 2:
oprot.writeFieldBegin('edge_type', TType.I32, 2)
edge_type = self.value
oprot.writeI32(edge_type)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeUnionEnd()
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class Date:
"""
Attributes:
- year
- month
- day
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I16:
self.year = iprot.readI16()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.BYTE:
self.month = iprot.readByte()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.BYTE:
self.day = iprot.readByte()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('Date')
if self.year != None:
oprot.writeFieldBegin('year', TType.I16, 1)
oprot.writeI16(self.year)
oprot.writeFieldEnd()
if self.month != None:
oprot.writeFieldBegin('month', TType.BYTE, 2)
oprot.writeByte(self.month)
oprot.writeFieldEnd()
if self.day != None:
oprot.writeFieldBegin('day', TType.BYTE, 3)
oprot.writeByte(self.day)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.year is not None:
value = pprint.pformat(self.year, indent=0)
value = padding.join(value.splitlines(True))
L.append(' year=%s' % (value))
if self.month is not None:
value = pprint.pformat(self.month, indent=0)
value = padding.join(value.splitlines(True))
L.append(' month=%s' % (value))
if self.day is not None:
value = pprint.pformat(self.day, indent=0)
value = padding.join(value.splitlines(True))
L.append(' day=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
class Time:
"""
Attributes:
- hour
- minute
- sec
- microsec
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.BYTE:
self.hour = iprot.readByte()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.BYTE:
self.minute = iprot.readByte()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.BYTE:
self.sec = iprot.readByte()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I32:
self.microsec = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('Time')
if self.hour != None:
oprot.writeFieldBegin('hour', TType.BYTE, 1)
oprot.writeByte(self.hour)
oprot.writeFieldEnd()
if self.minute != None:
oprot.writeFieldBegin('minute', TType.BYTE, 2)
oprot.writeByte(self.minute)
oprot.writeFieldEnd()
if self.sec != None:
oprot.writeFieldBegin('sec', TType.BYTE, 3)
oprot.writeByte(self.sec)
oprot.writeFieldEnd()
if self.microsec != None:
oprot.writeFieldBegin('microsec', TType.I32, 4)
oprot.writeI32(self.microsec)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.hour is not None:
value = pprint.pformat(self.hour, indent=0)
value = padding.join(value.splitlines(True))
L.append(' hour=%s' % (value))
if self.minute is not None:
value = pprint.pformat(self.minute, indent=0)
value = padding.join(value.splitlines(True))
L.append(' minute=%s' % (value))
if self.sec is not None:
value = pprint.pformat(self.sec, indent=0)
value = padding.join(value.splitlines(True))
L.append(' sec=%s' % (value))
if self.microsec is not None:
value = pprint.pformat(self.microsec, indent=0)
value = padding.join(value.splitlines(True))
L.append(' microsec=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
class DateTime:
"""
Attributes:
- year
- month
- day
- hour
- minute
- sec
- microsec
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I16:
self.year = iprot.readI16()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.BYTE:
self.month = iprot.readByte()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.BYTE:
self.day = iprot.readByte()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.BYTE:
self.hour = iprot.readByte()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.BYTE:
self.minute = iprot.readByte()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.BYTE:
self.sec = iprot.readByte()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.I32:
self.microsec = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('DateTime')
if self.year != None:
oprot.writeFieldBegin('year', TType.I16, 1)
oprot.writeI16(self.year)
oprot.writeFieldEnd()
if self.month != None:
oprot.writeFieldBegin('month', TType.BYTE, 2)
oprot.writeByte(self.month)
oprot.writeFieldEnd()
if self.day != None:
oprot.writeFieldBegin('day', TType.BYTE, 3)
oprot.writeByte(self.day)
oprot.writeFieldEnd()
if self.hour != None:
oprot.writeFieldBegin('hour', TType.BYTE, 4)
oprot.writeByte(self.hour)
oprot.writeFieldEnd()
if self.minute != None:
oprot.writeFieldBegin('minute', TType.BYTE, 5)
oprot.writeByte(self.minute)
oprot.writeFieldEnd()
if self.sec != None:
oprot.writeFieldBegin('sec', TType.BYTE, 6)
oprot.writeByte(self.sec)
oprot.writeFieldEnd()
if self.microsec != None:
oprot.writeFieldBegin('microsec', TType.I32, 7)
oprot.writeI32(self.microsec)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.year is not None:
value = pprint.pformat(self.year, indent=0)
value = padding.join(value.splitlines(True))
L.append(' year=%s' % (value))
if self.month is not None:
value = pprint.pformat(self.month, indent=0)
value = padding.join(value.splitlines(True))
L.append(' month=%s' % (value))
if self.day is not None:
value = pprint.pformat(self.day, indent=0)
value = padding.join(value.splitlines(True))
L.append(' day=%s' % (value))
if self.hour is not None:
value = pprint.pformat(self.hour, indent=0)
value = padding.join(value.splitlines(True))
L.append(' hour=%s' % (value))
if self.minute is not None:
value = pprint.pformat(self.minute, indent=0)
value = padding.join(value.splitlines(True))
L.append(' minute=%s' % (value))
if self.sec is not None:
value = pprint.pformat(self.sec, indent=0)
value = padding.join(value.splitlines(True))
L.append(' sec=%s' % (value))
if self.microsec is not None:
value = pprint.pformat(self.microsec, indent=0)
value = padding.join(value.splitlines(True))
L.append(' microsec=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
class Value(object):
"""
Attributes:
- nVal
- bVal
- iVal
- fVal
- sVal
- dVal
- tVal
- dtVal
- vVal
- eVal
- pVal
- lVal
- mVal
- uVal
- gVal
- ggVal
"""
thrift_spec = None
__init__ = None
__EMPTY__ = 0
NVAL = 1
BVAL = 2
IVAL = 3
FVAL = 4
SVAL = 5
DVAL = 6
TVAL = 7
DTVAL = 8
VVAL = 9
EVAL = 10
PVAL = 11
LVAL = 12
MVAL = 13
UVAL = 14
GVAL = 15
GGVAL = 16
@staticmethod
def isUnion():
return True
def get_nVal(self):
assert self.field == 1
return self.value
def get_bVal(self):
assert self.field == 2
return self.value
def get_iVal(self):
assert self.field == 3
return self.value
def get_fVal(self):
assert self.field == 4
return self.value
def get_sVal(self):
assert self.field == 5
return self.value
def get_dVal(self):
assert self.field == 6
return self.value
def get_tVal(self):
assert self.field == 7
return self.value
def get_dtVal(self):
assert self.field == 8
return self.value
def get_vVal(self):
assert self.field == 9
return self.value
def get_eVal(self):
assert self.field == 10
return self.value
def get_pVal(self):
assert self.field == 11
return self.value
def get_lVal(self):
assert self.field == 12
return self.value
def get_mVal(self):
assert self.field == 13
return self.value
def get_uVal(self):
assert self.field == 14
return self.value
def get_gVal(self):
assert self.field == 15
return self.value
def get_ggVal(self):
assert self.field == 16
return self.value
def set_nVal(self, value):
self.field = 1
self.value = value
def set_bVal(self, value):
self.field = 2
self.value = value
def set_iVal(self, value):
self.field = 3
self.value = value
def set_fVal(self, value):
self.field = 4
self.value = value
def set_sVal(self, value):
self.field = 5
self.value = value
def set_dVal(self, value):
self.field = 6
self.value = value
def set_tVal(self, value):
self.field = 7
self.value = value
def set_dtVal(self, value):
self.field = 8
self.value = value
def set_vVal(self, value):
self.field = 9
self.value = value
def set_eVal(self, value):
self.field = 10
self.value = value
def set_pVal(self, value):
self.field = 11
self.value = value
def set_lVal(self, value):
self.field = 12
self.value = value
def set_mVal(self, value):
self.field = 13
self.value = value
def set_uVal(self, value):
self.field = 14
self.value = value
def set_gVal(self, value):
self.field = 15
self.value = value
def set_ggVal(self, value):
self.field = 16
self.value = value
def getType(self):
return self.field
def __repr__(self):
value = pprint.pformat(self.value)
member = ''
if self.field == 1:
padding = ' ' * 5
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('nVal', value)
if self.field == 2:
padding = ' ' * 5
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('bVal', value)
if self.field == 3:
padding = ' ' * 5
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('iVal', value)
if self.field == 4:
padding = ' ' * 5
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('fVal', value)
if self.field == 5:
padding = ' ' * 5
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('sVal', value)
if self.field == 6:
padding = ' ' * 5
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('dVal', value)
if self.field == 7:
padding = ' ' * 5
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('tVal', value)
if self.field == 8:
padding = ' ' * 6
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('dtVal', value)
if self.field == 9:
padding = ' ' * 5
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('vVal', value)
if self.field == 10:
padding = ' ' * 5
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('eVal', value)
if self.field == 11:
padding = ' ' * 5
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('pVal', value)
if self.field == 12:
padding = ' ' * 5
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('lVal', value)
if self.field == 13:
padding = ' ' * 5
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('mVal', value)
if self.field == 14:
padding = ' ' * 5
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('uVal', value)
if self.field == 15:
padding = ' ' * 5
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('gVal', value)
if self.field == 16:
padding = ' ' * 6
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('ggVal', value)
return "%s(%s)" % (self.__class__.__name__, member)
def read(self, iprot):
self.field = 0
self.value = None
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, True], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, True], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
nVal = iprot.readI32()
assert self.field == 0 and self.value is None
self.set_nVal(nVal)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.BOOL:
bVal = iprot.readBool()
assert self.field == 0 and self.value is None
self.set_bVal(bVal)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
iVal = iprot.readI64()
assert self.field == 0 and self.value is None
self.set_iVal(iVal)
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.DOUBLE:
fVal = iprot.readDouble()
assert self.field == 0 and self.value is None
self.set_fVal(fVal)
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRING:
sVal = iprot.readString()
assert self.field == 0 and self.value is None
self.set_sVal(sVal)
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.STRUCT:
dVal = Date()
dVal.read(iprot)
assert self.field == 0 and self.value is None
self.set_dVal(dVal)
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.STRUCT:
tVal = Time()
tVal.read(iprot)
assert self.field == 0 and self.value is None
self.set_tVal(tVal)
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.STRUCT:
dtVal = DateTime()
dtVal.read(iprot)
assert self.field == 0 and self.value is None
self.set_dtVal(dtVal)
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.STRUCT:
vVal = Vertex()
vVal.read(iprot)
assert self.field == 0 and self.value is None
self.set_vVal(vVal)
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.STRUCT:
eVal = Edge()
eVal.read(iprot)
assert self.field == 0 and self.value is None
self.set_eVal(eVal)
else:
iprot.skip(ftype)
elif fid == 11:
if ftype == TType.STRUCT:
pVal = Path()
pVal.read(iprot)
assert self.field == 0 and self.value is None
self.set_pVal(pVal)
else:
iprot.skip(ftype)
elif fid == 12:
if ftype == TType.STRUCT:
lVal = NList()
lVal.read(iprot)
assert self.field == 0 and self.value is None
self.set_lVal(lVal)
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.STRUCT:
mVal = NMap()
mVal.read(iprot)
assert self.field == 0 and self.value is None
self.set_mVal(mVal)
else:
iprot.skip(ftype)
elif fid == 14:
if ftype == TType.STRUCT:
uVal = NSet()
uVal.read(iprot)
assert self.field == 0 and self.value is None
self.set_uVal(uVal)
else:
iprot.skip(ftype)
elif fid == 15:
if ftype == TType.STRUCT:
gVal = DataSet()
gVal.read(iprot)
assert self.field == 0 and self.value is None
self.set_gVal(gVal)
else:
iprot.skip(ftype)
elif fid == 16:
if ftype == TType.STRUCT:
ggVal = Geography()
ggVal.read(iprot)
assert self.field == 0 and self.value is None
self.set_ggVal(ggVal)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, True], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, True], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeUnionBegin('Value')
if self.field == 1:
oprot.writeFieldBegin('nVal', TType.I32, 1)
nVal = self.value
oprot.writeI32(nVal)
oprot.writeFieldEnd()
if self.field == 2:
oprot.writeFieldBegin('bVal', TType.BOOL, 2)
bVal = self.value
oprot.writeBool(bVal)
oprot.writeFieldEnd()
if self.field == 3:
oprot.writeFieldBegin('iVal', TType.I64, 3)
iVal = self.value
oprot.writeI64(iVal)
oprot.writeFieldEnd()
if self.field == 4:
oprot.writeFieldBegin('fVal', TType.DOUBLE, 4)
fVal = self.value
oprot.writeDouble(fVal)
oprot.writeFieldEnd()
if self.field == 5:
oprot.writeFieldBegin('sVal', TType.STRING, 5)
sVal = self.value
oprot.writeString(sVal)
oprot.writeFieldEnd()
if self.field == 6:
oprot.writeFieldBegin('dVal', TType.STRUCT, 6)
dVal = self.value
dVal.write(oprot)
oprot.writeFieldEnd()
if self.field == 7:
oprot.writeFieldBegin('tVal', TType.STRUCT, 7)
tVal = self.value
tVal.write(oprot)
oprot.writeFieldEnd()
if self.field == 8:
oprot.writeFieldBegin('dtVal', TType.STRUCT, 8)
dtVal = self.value
dtVal.write(oprot)
oprot.writeFieldEnd()
if self.field == 9:
oprot.writeFieldBegin('vVal', TType.STRUCT, 9)
vVal = self.value
vVal.write(oprot)
oprot.writeFieldEnd()
if self.field == 10:
oprot.writeFieldBegin('eVal', TType.STRUCT, 10)
eVal = self.value
eVal.write(oprot)
oprot.writeFieldEnd()
if self.field == 11:
oprot.writeFieldBegin('pVal', TType.STRUCT, 11)
pVal = self.value
pVal.write(oprot)
oprot.writeFieldEnd()
if self.field == 12:
oprot.writeFieldBegin('lVal', TType.STRUCT, 12)
lVal = self.value
lVal.write(oprot)
oprot.writeFieldEnd()
if self.field == 13:
oprot.writeFieldBegin('mVal', TType.STRUCT, 13)
mVal = self.value
mVal.write(oprot)
oprot.writeFieldEnd()
if self.field == 14:
oprot.writeFieldBegin('uVal', TType.STRUCT, 14)
uVal = self.value
uVal.write(oprot)
oprot.writeFieldEnd()
if self.field == 15:
oprot.writeFieldBegin('gVal', TType.STRUCT, 15)
gVal = self.value
gVal.write(oprot)
oprot.writeFieldEnd()
if self.field == 16:
oprot.writeFieldBegin('ggVal', TType.STRUCT, 16)
ggVal = self.value
ggVal.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeUnionEnd()
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
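# ---------------------------------------------------------------------------
# Note on the Value union above: each instance keeps a (field, value) pair.
# Every set_*Val() records the thrift field id in self.field and stores the
# payload in self.value; the matching get_*Val() asserts that this member is
# the active one, and getType() returns the field id. read() asserts
# `self.field == 0 and self.value is None` before setting a member, so a wire
# payload carrying more than one union member trips the assert.
#
# Illustrative only (thrift_spec and the generated __init__ helpers are
# installed later in the module, so nothing here is executed at this point):
#     v = Value()
#     v.set_sVal("some text")   # marks sVal (field 5) as the active member
#     v.getType()               # -> 5
#     v.get_sVal()              # -> "some text"
# ---------------------------------------------------------------------------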
class NList:
"""
Attributes:
- values
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.values = []
(_etype3, _size0) = iprot.readListBegin()
if _size0 >= 0:
for _i4 in six.moves.range(_size0):
_elem5 = Value()
_elem5.read(iprot)
self.values.append(_elem5)
else:
while iprot.peekList():
_elem6 = Value()
_elem6.read(iprot)
self.values.append(_elem6)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('NList')
if self.values != None:
oprot.writeFieldBegin('values', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.values))
for iter7 in self.values:
iter7.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.values is not None:
value = pprint.pformat(self.values, indent=0)
value = padding.join(value.splitlines(True))
L.append(' values=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
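# Serialization pattern shared by the structs in this module: read()/write()
# first try the C fast path (fastproto.decode/encode) when an accelerated
# binary (protoid=0) or compact (protoid=2) protocol is in use and thrift_spec
# is populated; otherwise they fall back to the generic readFieldBegin()/
# skip() loop. A negative size returned by readListBegin(), readMapBegin() or
# readSetBegin() means the element count is unknown, so the fallback consumes
# elements with peekList()/peekMap()/peekSet() until the container ends.
# thrift_spec, the annotation tables and __init__ are left as None
# placeholders here and are presumably filled in further down in the
# generated module.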
class NMap:
"""
Attributes:
- kvs
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.MAP:
self.kvs = {}
(_ktype9, _vtype10, _size8 ) = iprot.readMapBegin()
if _size8 >= 0:
for _i12 in six.moves.range(_size8):
_key13 = iprot.readString()
_val14 = Value()
_val14.read(iprot)
self.kvs[_key13] = _val14
else:
while iprot.peekMap():
_key15 = iprot.readString()
_val16 = Value()
_val16.read(iprot)
self.kvs[_key15] = _val16
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('NMap')
if self.kvs != None:
oprot.writeFieldBegin('kvs', TType.MAP, 1)
oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.kvs))
for kiter17,viter18 in self.kvs.items():
oprot.writeString(kiter17)
viter18.write(oprot)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.kvs is not None:
value = pprint.pformat(self.kvs, indent=0)
value = padding.join(value.splitlines(True))
L.append(' kvs=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
class NSet:
"""
Attributes:
- values
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.SET:
self.values = set()
(_etype22, _size19) = iprot.readSetBegin()
if _size19 >= 0:
for _i23 in six.moves.range(_size19):
_elem24 = Value()
_elem24.read(iprot)
self.values.add(_elem24)
else:
while iprot.peekSet():
_elem25 = Value()
_elem25.read(iprot)
self.values.add(_elem25)
iprot.readSetEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('NSet')
if self.values != None:
oprot.writeFieldBegin('values', TType.SET, 1)
oprot.writeSetBegin(TType.STRUCT, len(self.values))
for iter26 in self.values:
iter26.write(oprot)
oprot.writeSetEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.values is not None:
value = pprint.pformat(self.values, indent=0)
value = padding.join(value.splitlines(True))
L.append(' values=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
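# NList, NMap and NSet are thin wrappers around a Python list, dict and set of
# Value objects; Value refers back to them through its lVal, mVal and uVal
# members, which is how nested container values are represented.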
class Row:
"""
Attributes:
- values
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.values = []
(_etype30, _size27) = iprot.readListBegin()
if _size27 >= 0:
for _i31 in six.moves.range(_size27):
_elem32 = Value()
_elem32.read(iprot)
self.values.append(_elem32)
else:
while iprot.peekList():
_elem33 = Value()
_elem33.read(iprot)
self.values.append(_elem33)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('Row')
if self.values != None:
oprot.writeFieldBegin('values', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.values))
for iter34 in self.values:
iter34.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.values is not None:
value = pprint.pformat(self.values, indent=0)
value = padding.join(value.splitlines(True))
L.append(' values=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
class DataSet:
"""
Attributes:
- column_names
- rows
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.column_names = []
(_etype38, _size35) = iprot.readListBegin()
if _size35 >= 0:
for _i39 in six.moves.range(_size35):
_elem40 = iprot.readString()
self.column_names.append(_elem40)
else:
while iprot.peekList():
_elem41 = iprot.readString()
self.column_names.append(_elem41)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.rows = []
(_etype45, _size42) = iprot.readListBegin()
if _size42 >= 0:
for _i46 in six.moves.range(_size42):
_elem47 = Row()
_elem47.read(iprot)
self.rows.append(_elem47)
else:
while iprot.peekList():
_elem48 = Row()
_elem48.read(iprot)
self.rows.append(_elem48)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('DataSet')
if self.column_names != None:
oprot.writeFieldBegin('column_names', TType.LIST, 1)
oprot.writeListBegin(TType.STRING, len(self.column_names))
for iter49 in self.column_names:
oprot.writeString(iter49)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.rows != None:
oprot.writeFieldBegin('rows', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.rows))
for iter50 in self.rows:
iter50.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.column_names is not None:
value = pprint.pformat(self.column_names, indent=0)
value = padding.join(value.splitlines(True))
L.append(' column_names=%s' % (value))
if self.rows is not None:
value = pprint.pformat(self.rows, indent=0)
value = padding.join(value.splitlines(True))
L.append(' rows=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
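# A DataSet is a small tabular result: column_names holds the header strings
# and rows holds Row objects whose values presumably line up with the columns
# by index, i.e. rows[i].values[j] would be the cell for column_names[j].
# Rough illustration only (keyword access assumes the generated __init__
# installed later in the module):
#     ds.column_names              # e.g. ["name", "age"]
#     ds.rows[0].values[1].get_iVal()   # the "age" cell of the first row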
class Coordinate:
"""
Attributes:
- x
- y
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.DOUBLE:
self.x = iprot.readDouble()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.DOUBLE:
self.y = iprot.readDouble()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('Coordinate')
if self.x != None:
oprot.writeFieldBegin('x', TType.DOUBLE, 1)
oprot.writeDouble(self.x)
oprot.writeFieldEnd()
if self.y != None:
oprot.writeFieldBegin('y', TType.DOUBLE, 2)
oprot.writeDouble(self.y)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.x is not None:
value = pprint.pformat(self.x, indent=0)
value = padding.join(value.splitlines(True))
L.append(' x=%s' % (value))
if self.y is not None:
value = pprint.pformat(self.y, indent=0)
value = padding.join(value.splitlines(True))
L.append(' y=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
class Point:
"""
Attributes:
- coord
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.coord = Coordinate()
self.coord.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('Point')
if self.coord != None:
oprot.writeFieldBegin('coord', TType.STRUCT, 1)
self.coord.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.coord is not None:
value = pprint.pformat(self.coord, indent=0)
value = padding.join(value.splitlines(True))
L.append(' coord=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
class LineString:
"""
Attributes:
- coordList
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.coordList = []
(_etype54, _size51) = iprot.readListBegin()
if _size51 >= 0:
for _i55 in six.moves.range(_size51):
_elem56 = Coordinate()
_elem56.read(iprot)
self.coordList.append(_elem56)
else:
while iprot.peekList():
_elem57 = Coordinate()
_elem57.read(iprot)
self.coordList.append(_elem57)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('LineString')
if self.coordList != None:
oprot.writeFieldBegin('coordList', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.coordList))
for iter58 in self.coordList:
iter58.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.coordList is not None:
value = pprint.pformat(self.coordList, indent=0)
value = padding.join(value.splitlines(True))
L.append(' coordList=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
class Polygon:
"""
Attributes:
- coordListList
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.coordListList = []
(_etype62, _size59) = iprot.readListBegin()
if _size59 >= 0:
for _i63 in six.moves.range(_size59):
_elem64 = []
(_etype68, _size65) = iprot.readListBegin()
if _size65 >= 0:
for _i69 in six.moves.range(_size65):
_elem70 = Coordinate()
_elem70.read(iprot)
_elem64.append(_elem70)
else:
while iprot.peekList():
_elem71 = Coordinate()
_elem71.read(iprot)
_elem64.append(_elem71)
iprot.readListEnd()
self.coordListList.append(_elem64)
else:
while iprot.peekList():
_elem72 = []
(_etype76, _size73) = iprot.readListBegin()
if _size73 >= 0:
for _i77 in six.moves.range(_size73):
_elem78 = Coordinate()
_elem78.read(iprot)
_elem72.append(_elem78)
else:
while iprot.peekList():
_elem79 = Coordinate()
_elem79.read(iprot)
_elem72.append(_elem79)
iprot.readListEnd()
self.coordListList.append(_elem72)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('Polygon')
if self.coordListList != None:
oprot.writeFieldBegin('coordListList', TType.LIST, 1)
oprot.writeListBegin(TType.LIST, len(self.coordListList))
for iter80 in self.coordListList:
oprot.writeListBegin(TType.STRUCT, len(iter80))
for iter81 in iter80:
iter81.write(oprot)
oprot.writeListEnd()
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.coordListList is not None:
value = pprint.pformat(self.coordListList, indent=0)
value = padding.join(value.splitlines(True))
L.append(' coordListList=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
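# Geometry building blocks: Coordinate is an (x, y) pair of doubles, Point
# wraps a single Coordinate, LineString carries an ordered list of
# Coordinates, and Polygon carries a list of coordinate lists, presumably one
# ring per inner list in the usual polygon convention.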
class Geography(object):
"""
Attributes:
- ptVal
- lsVal
- pgVal
"""
thrift_spec = None
__init__ = None
__EMPTY__ = 0
PTVAL = 1
LSVAL = 2
PGVAL = 3
@staticmethod
def isUnion():
return True
def get_ptVal(self):
assert self.field == 1
return self.value
def get_lsVal(self):
assert self.field == 2
return self.value
def get_pgVal(self):
assert self.field == 3
return self.value
def set_ptVal(self, value):
self.field = 1
self.value = value
def set_lsVal(self, value):
self.field = 2
self.value = value
def set_pgVal(self, value):
self.field = 3
self.value = value
def getType(self):
return self.field
def __repr__(self):
value = pprint.pformat(self.value)
member = ''
if self.field == 1:
padding = ' ' * 6
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('ptVal', value)
if self.field == 2:
padding = ' ' * 6
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('lsVal', value)
if self.field == 3:
padding = ' ' * 6
value = padding.join(value.splitlines(True))
member = '\n %s=%s' % ('pgVal', value)
return "%s(%s)" % (self.__class__.__name__, member)
def read(self, iprot):
self.field = 0
self.value = None
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, True], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, True], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
ptVal = Point()
ptVal.read(iprot)
assert self.field == 0 and self.value is None
self.set_ptVal(ptVal)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
lsVal = LineString()
lsVal.read(iprot)
assert self.field == 0 and self.value is None
self.set_lsVal(lsVal)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
pgVal = Polygon()
pgVal.read(iprot)
assert self.field == 0 and self.value is None
self.set_pgVal(pgVal)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, True], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, True], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeUnionBegin('Geography')
if self.field == 1:
oprot.writeFieldBegin('ptVal', TType.STRUCT, 1)
ptVal = self.value
ptVal.write(oprot)
oprot.writeFieldEnd()
if self.field == 2:
oprot.writeFieldBegin('lsVal', TType.STRUCT, 2)
lsVal = self.value
lsVal.write(oprot)
oprot.writeFieldEnd()
if self.field == 3:
oprot.writeFieldBegin('pgVal', TType.STRUCT, 3)
pgVal = self.value
pgVal.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeUnionEnd()
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
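# Geography is a union following the same (field, value) scheme as Value:
# exactly one of ptVal (Point), lsVal (LineString) or pgVal (Polygon) is set,
# and the PTVAL/LSVAL/PGVAL constants mirror the corresponding field ids.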
class Tag:
"""
Attributes:
- name
- props
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.MAP:
self.props = {}
(_ktype83, _vtype84, _size82 ) = iprot.readMapBegin()
if _size82 >= 0:
for _i86 in six.moves.range(_size82):
_key87 = iprot.readString()
_val88 = Value()
_val88.read(iprot)
self.props[_key87] = _val88
else:
while iprot.peekMap():
_key89 = iprot.readString()
_val90 = Value()
_val90.read(iprot)
self.props[_key89] = _val90
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('Tag')
if self.name != None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name)
oprot.writeFieldEnd()
if self.props != None:
oprot.writeFieldBegin('props', TType.MAP, 2)
oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.props))
for kiter91,viter92 in self.props.items():
oprot.writeString(kiter91)
viter92.write(oprot)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.name is not None:
value = pprint.pformat(self.name, indent=0)
value = padding.join(value.splitlines(True))
L.append(' name=%s' % (value))
if self.props is not None:
value = pprint.pformat(self.props, indent=0)
value = padding.join(value.splitlines(True))
L.append(' props=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
class Vertex:
"""
Attributes:
- vid
- tags
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.vid = Value()
self.vid.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.tags = []
(_etype96, _size93) = iprot.readListBegin()
if _size93 >= 0:
for _i97 in six.moves.range(_size93):
_elem98 = Tag()
_elem98.read(iprot)
self.tags.append(_elem98)
else:
while iprot.peekList():
_elem99 = Tag()
_elem99.read(iprot)
self.tags.append(_elem99)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('Vertex')
if self.vid != None:
oprot.writeFieldBegin('vid', TType.STRUCT, 1)
self.vid.write(oprot)
oprot.writeFieldEnd()
if self.tags != None:
oprot.writeFieldBegin('tags', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.tags))
for iter100 in self.tags:
iter100.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.vid is not None:
value = pprint.pformat(self.vid, indent=0)
value = padding.join(value.splitlines(True))
L.append(' vid=%s' % (value))
if self.tags is not None:
value = pprint.pformat(self.tags, indent=0)
value = padding.join(value.splitlines(True))
L.append(' tags=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
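# Tag pairs a tag name with a map of property name -> Value, and a Vertex is a
# vertex id (itself a Value) plus the list of Tags attached to that vertex.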
class Edge:
"""
Attributes:
- src
- dst
- type
- name
- ranking
- props
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.src = Value()
self.src.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.dst = Value()
self.dst.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I32:
self.type = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.name = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.I64:
self.ranking = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.MAP:
self.props = {}
(_ktype102, _vtype103, _size101 ) = iprot.readMapBegin()
if _size101 >= 0:
for _i105 in six.moves.range(_size101):
_key106 = iprot.readString()
_val107 = Value()
_val107.read(iprot)
self.props[_key106] = _val107
else:
while iprot.peekMap():
_key108 = iprot.readString()
_val109 = Value()
_val109.read(iprot)
self.props[_key108] = _val109
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('Edge')
if self.src != None:
oprot.writeFieldBegin('src', TType.STRUCT, 1)
self.src.write(oprot)
oprot.writeFieldEnd()
if self.dst != None:
oprot.writeFieldBegin('dst', TType.STRUCT, 2)
self.dst.write(oprot)
oprot.writeFieldEnd()
if self.type != None:
oprot.writeFieldBegin('type', TType.I32, 3)
oprot.writeI32(self.type)
oprot.writeFieldEnd()
if self.name != None:
oprot.writeFieldBegin('name', TType.STRING, 4)
oprot.writeString(self.name)
oprot.writeFieldEnd()
if self.ranking != None:
oprot.writeFieldBegin('ranking', TType.I64, 5)
oprot.writeI64(self.ranking)
oprot.writeFieldEnd()
if self.props != None:
oprot.writeFieldBegin('props', TType.MAP, 6)
oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.props))
for kiter110,viter111 in self.props.items():
oprot.writeString(kiter110)
viter111.write(oprot)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.src is not None:
value = pprint.pformat(self.src, indent=0)
value = padding.join(value.splitlines(True))
L.append(' src=%s' % (value))
if self.dst is not None:
value = pprint.pformat(self.dst, indent=0)
value = padding.join(value.splitlines(True))
L.append(' dst=%s' % (value))
if self.type is not None:
value = pprint.pformat(self.type, indent=0)
value = padding.join(value.splitlines(True))
L.append(' type=%s' % (value))
if self.name is not None:
value = pprint.pformat(self.name, indent=0)
value = padding.join(value.splitlines(True))
L.append(' name=%s' % (value))
if self.ranking is not None:
value = pprint.pformat(self.ranking, indent=0)
value = padding.join(value.splitlines(True))
L.append(' ranking=%s' % (value))
if self.props is not None:
value = pprint.pformat(self.props, indent=0)
value = padding.join(value.splitlines(True))
L.append(' props=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
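# An Edge records the src and dst vertex ids (as Values), a numeric edge type,
# the edge name, a ranking value and a props map of property name -> Value.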
class Step:
"""
Attributes:
- dst
- type
- name
- ranking
- props
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.dst = Vertex()
self.dst.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.type = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.name = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I64:
self.ranking = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.MAP:
self.props = {}
(_ktype113, _vtype114, _size112 ) = iprot.readMapBegin()
if _size112 >= 0:
for _i116 in six.moves.range(_size112):
_key117 = iprot.readString()
_val118 = Value()
_val118.read(iprot)
self.props[_key117] = _val118
else:
while iprot.peekMap():
_key119 = iprot.readString()
_val120 = Value()
_val120.read(iprot)
self.props[_key119] = _val120
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('Step')
if self.dst != None:
oprot.writeFieldBegin('dst', TType.STRUCT, 1)
self.dst.write(oprot)
oprot.writeFieldEnd()
if self.type != None:
oprot.writeFieldBegin('type', TType.I32, 2)
oprot.writeI32(self.type)
oprot.writeFieldEnd()
if self.name != None:
oprot.writeFieldBegin('name', TType.STRING, 3)
oprot.writeString(self.name)
oprot.writeFieldEnd()
if self.ranking != None:
oprot.writeFieldBegin('ranking', TType.I64, 4)
oprot.writeI64(self.ranking)
oprot.writeFieldEnd()
if self.props != None:
oprot.writeFieldBegin('props', TType.MAP, 5)
oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.props))
for kiter121,viter122 in self.props.items():
oprot.writeString(kiter121)
viter122.write(oprot)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.dst is not None:
value = pprint.pformat(self.dst, indent=0)
value = padding.join(value.splitlines(True))
L.append(' dst=%s' % (value))
if self.type is not None:
value = pprint.pformat(self.type, indent=0)
value = padding.join(value.splitlines(True))
L.append(' type=%s' % (value))
if self.name is not None:
value = pprint.pformat(self.name, indent=0)
value = padding.join(value.splitlines(True))
L.append(' name=%s' % (value))
if self.ranking is not None:
value = pprint.pformat(self.ranking, indent=0)
value = padding.join(value.splitlines(True))
L.append(' ranking=%s' % (value))
if self.props is not None:
value = pprint.pformat(self.props, indent=0)
value = padding.join(value.splitlines(True))
L.append(' props=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
class Path:
"""
Attributes:
- src
- steps
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.src = Vertex()
self.src.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.steps = []
(_etype126, _size123) = iprot.readListBegin()
if _size123 >= 0:
for _i127 in six.moves.range(_size123):
_elem128 = Step()
_elem128.read(iprot)
self.steps.append(_elem128)
else:
while iprot.peekList():
_elem129 = Step()
_elem129.read(iprot)
self.steps.append(_elem129)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('Path')
if self.src != None:
oprot.writeFieldBegin('src', TType.STRUCT, 1)
self.src.write(oprot)
oprot.writeFieldEnd()
if self.steps != None:
oprot.writeFieldBegin('steps', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.steps))
for iter130 in self.steps:
iter130.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.src is not None:
value = pprint.pformat(self.src, indent=0)
value = padding.join(value.splitlines(True))
L.append(' src=%s' % (value))
if self.steps is not None:
value = pprint.pformat(self.steps, indent=0)
value = padding.join(value.splitlines(True))
L.append(' steps=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
class HostAddr:
"""
Attributes:
- host
- port
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.host = iprot.readString().decode('utf-8') if UTF8STRINGS else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.port = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('HostAddr')
if self.host != None:
oprot.writeFieldBegin('host', TType.STRING, 1)
oprot.writeString(self.host.encode('utf-8')) if UTF8STRINGS and not isinstance(self.host, bytes) else oprot.writeString(self.host)
oprot.writeFieldEnd()
if self.port != None:
oprot.writeFieldBegin('port', TType.I32, 2)
oprot.writeI32(self.port)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.host is not None:
value = pprint.pformat(self.host, indent=0)
value = padding.join(value.splitlines(True))
L.append(' host=%s' % (value))
if self.port is not None:
value = pprint.pformat(self.port, indent=0)
value = padding.join(value.splitlines(True))
L.append(' port=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
class KeyValue:
"""
Attributes:
- key
- value
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.key = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.value = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('KeyValue')
if self.key != None:
oprot.writeFieldBegin('key', TType.STRING, 1)
oprot.writeString(self.key)
oprot.writeFieldEnd()
if self.value != None:
oprot.writeFieldBegin('value', TType.STRING, 2)
oprot.writeString(self.value)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.key is not None:
value = pprint.pformat(self.key, indent=0)
value = padding.join(value.splitlines(True))
L.append(' key=%s' % (value))
if self.value is not None:
value = pprint.pformat(self.value, indent=0)
value = padding.join(value.splitlines(True))
L.append(' value=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
class LogInfo:
"""
Attributes:
- log_id
- term_id
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.log_id = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.term_id = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('LogInfo')
if self.log_id != None:
oprot.writeFieldBegin('log_id', TType.I64, 1)
oprot.writeI64(self.log_id)
oprot.writeFieldEnd()
if self.term_id != None:
oprot.writeFieldBegin('term_id', TType.I64, 2)
oprot.writeI64(self.term_id)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.log_id is not None:
value = pprint.pformat(self.log_id, indent=0)
value = padding.join(value.splitlines(True))
L.append(' log_id=%s' % (value))
if self.term_id is not None:
value = pprint.pformat(self.term_id, indent=0)
value = padding.join(value.splitlines(True))
L.append(' term_id=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
class DirInfo:
"""
Attributes:
- root
- data
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.root = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.data = []
(_etype134, _size131) = iprot.readListBegin()
if _size131 >= 0:
for _i135 in six.moves.range(_size131):
_elem136 = iprot.readString()
self.data.append(_elem136)
else:
while iprot.peekList():
_elem137 = iprot.readString()
self.data.append(_elem137)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('DirInfo')
if self.root != None:
oprot.writeFieldBegin('root', TType.STRING, 1)
oprot.writeString(self.root)
oprot.writeFieldEnd()
if self.data != None:
oprot.writeFieldBegin('data', TType.LIST, 2)
oprot.writeListBegin(TType.STRING, len(self.data))
for iter138 in self.data:
oprot.writeString(iter138)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.root is not None:
value = pprint.pformat(self.root, indent=0)
value = padding.join(value.splitlines(True))
L.append(' root=%s' % (value))
if self.data is not None:
value = pprint.pformat(self.data, indent=0)
value = padding.join(value.splitlines(True))
L.append(' data=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
class NodeInfo:
"""
Attributes:
- host
- dir
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.host = HostAddr()
self.host.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.dir = DirInfo()
self.dir.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('NodeInfo')
if self.host != None:
oprot.writeFieldBegin('host', TType.STRUCT, 1)
self.host.write(oprot)
oprot.writeFieldEnd()
if self.dir != None:
oprot.writeFieldBegin('dir', TType.STRUCT, 2)
self.dir.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.host is not None:
value = pprint.pformat(self.host, indent=0)
value = padding.join(value.splitlines(True))
L.append(' host=%s' % (value))
if self.dir is not None:
value = pprint.pformat(self.dir, indent=0)
value = padding.join(value.splitlines(True))
L.append(' dir=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
class PartitionBackupInfo:
"""
Attributes:
- info
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.MAP:
self.info = {}
(_ktype140, _vtype141, _size139 ) = iprot.readMapBegin()
if _size139 >= 0:
for _i143 in six.moves.range(_size139):
_key144 = iprot.readI32()
_val145 = LogInfo()
_val145.read(iprot)
self.info[_key144] = _val145
else:
while iprot.peekMap():
_key146 = iprot.readI32()
_val147 = LogInfo()
_val147.read(iprot)
self.info[_key146] = _val147
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('PartitionBackupInfo')
if self.info != None:
oprot.writeFieldBegin('info', TType.MAP, 1)
oprot.writeMapBegin(TType.I32, TType.STRUCT, len(self.info))
for kiter148,viter149 in self.info.items():
oprot.writeI32(kiter148)
viter149.write(oprot)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.info is not None:
value = pprint.pformat(self.info, indent=0)
value = padding.join(value.splitlines(True))
L.append(' info=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
class CheckpointInfo:
"""
Attributes:
- partition_info
- path
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.partition_info = PartitionBackupInfo()
self.partition_info.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.path = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('CheckpointInfo')
if self.partition_info != None:
oprot.writeFieldBegin('partition_info', TType.STRUCT, 1)
self.partition_info.write(oprot)
oprot.writeFieldEnd()
if self.path != None:
oprot.writeFieldBegin('path', TType.STRING, 2)
oprot.writeString(self.path)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.partition_info is not None:
value = pprint.pformat(self.partition_info, indent=0)
value = padding.join(value.splitlines(True))
L.append(' partition_info=%s' % (value))
if self.path is not None:
value = pprint.pformat(self.path, indent=0)
value = padding.join(value.splitlines(True))
L.append(' path=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
class LogEntry:
"""
Attributes:
- cluster
- log_str
"""
thrift_spec = None
thrift_field_annotations = None
thrift_struct_annotations = None
__init__ = None
@staticmethod
def isUnion():
return False
def read(self, iprot):
if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
return
if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.cluster = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.log_str = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
return
if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
return
oprot.writeStructBegin('LogEntry')
if self.cluster != None:
oprot.writeFieldBegin('cluster', TType.I64, 1)
oprot.writeI64(self.cluster)
oprot.writeFieldEnd()
if self.log_str != None:
oprot.writeFieldBegin('log_str', TType.STRING, 2)
oprot.writeString(self.log_str)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def __repr__(self):
L = []
padding = ' ' * 4
if self.cluster is not None:
value = pprint.pformat(self.cluster, indent=0)
value = padding.join(value.splitlines(True))
L.append(' cluster=%s' % (value))
if self.log_str is not None:
value = pprint.pformat(self.log_str, indent=0)
value = padding.join(value.splitlines(True))
L.append(' log_str=%s' % (value))
return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
# Override the __hash__ function for Python3 - t10434117
if not six.PY2:
__hash__ = object.__hash__
ClusterID = UnimplementedTypedef()
GraphSpaceID = UnimplementedTypedef()
PartitionID = UnimplementedTypedef()
TagID = UnimplementedTypedef()
EdgeType = UnimplementedTypedef()
EdgeRanking = UnimplementedTypedef()
LogID = UnimplementedTypedef()
TermID = UnimplementedTypedef()
Timestamp = UnimplementedTypedef()
IndexID = UnimplementedTypedef()
Port = UnimplementedTypedef()
SessionID = UnimplementedTypedef()
ExecutionPlanID = UnimplementedTypedef()
all_structs.append(SchemaID)
SchemaID.thrift_spec = (
None, # 0
(1, TType.I32, 'tag_id', None, None, 2, ), # 1
(2, TType.I32, 'edge_type', None, None, 2, ), # 2
)
SchemaID.thrift_struct_annotations = {
}
SchemaID.thrift_field_annotations = {
}
def SchemaID__init__(self, tag_id=None, edge_type=None,):
self.field = 0
self.value = None
if tag_id is not None:
assert self.field == 0 and self.value is None
self.field = 1
self.value = tag_id
if edge_type is not None:
assert self.field == 0 and self.value is None
self.field = 2
self.value = edge_type
SchemaID.__init__ = SchemaID__init__
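# Illustrative sketch (not part of the generated output): SchemaID is a Thrift
# union, so exactly one of tag_id / edge_type may be passed; the asserts in
# SchemaID__init__ above enforce that and record the chosen arm in .field.
def _example_schema_id():
    sid = SchemaID(tag_id=7)
    return sid.field, sid.value  # (1, 7): field 1 is the tag_id arm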
all_structs.append(Date)
Date.thrift_spec = (
None, # 0
(1, TType.I16, 'year', None, None, 2, ), # 1
(2, TType.BYTE, 'month', None, None, 2, ), # 2
(3, TType.BYTE, 'day', None, None, 2, ), # 3
)
Date.thrift_struct_annotations = {
"cpp.type": "nebula::Date",
}
Date.thrift_field_annotations = {
}
def Date__init__(self, year=None, month=None, day=None,):
self.year = year
self.month = month
self.day = day
Date.__init__ = Date__init__
def Date__setstate__(self, state):
state.setdefault('year', None)
state.setdefault('month', None)
state.setdefault('day', None)
self.__dict__ = state
Date.__getstate__ = lambda self: self.__dict__.copy()
Date.__setstate__ = Date__setstate__
all_structs.append(Time)
Time.thrift_spec = (
None, # 0
(1, TType.BYTE, 'hour', None, None, 2, ), # 1
(2, TType.BYTE, 'minute', None, None, 2, ), # 2
(3, TType.BYTE, 'sec', None, None, 2, ), # 3
(4, TType.I32, 'microsec', None, None, 2, ), # 4
)
Time.thrift_struct_annotations = {
"cpp.type": "nebula::Time",
}
Time.thrift_field_annotations = {
}
def Time__init__(self, hour=None, minute=None, sec=None, microsec=None,):
self.hour = hour
self.minute = minute
self.sec = sec
self.microsec = microsec
Time.__init__ = Time__init__
def Time__setstate__(self, state):
state.setdefault('hour', None)
state.setdefault('minute', None)
state.setdefault('sec', None)
state.setdefault('microsec', None)
self.__dict__ = state
Time.__getstate__ = lambda self: self.__dict__.copy()
Time.__setstate__ = Time__setstate__
all_structs.append(DateTime)
DateTime.thrift_spec = (
None, # 0
(1, TType.I16, 'year', None, None, 2, ), # 1
(2, TType.BYTE, 'month', None, None, 2, ), # 2
(3, TType.BYTE, 'day', None, None, 2, ), # 3
(4, TType.BYTE, 'hour', None, None, 2, ), # 4
(5, TType.BYTE, 'minute', None, None, 2, ), # 5
(6, TType.BYTE, 'sec', None, None, 2, ), # 6
(7, TType.I32, 'microsec', None, None, 2, ), # 7
)
DateTime.thrift_struct_annotations = {
"cpp.type": "nebula::DateTime",
}
DateTime.thrift_field_annotations = {
}
def DateTime__init__(self, year=None, month=None, day=None, hour=None, minute=None, sec=None, microsec=None,):
self.year = year
self.month = month
self.day = day
self.hour = hour
self.minute = minute
self.sec = sec
self.microsec = microsec
DateTime.__init__ = DateTime__init__
def DateTime__setstate__(self, state):
state.setdefault('year', None)
state.setdefault('month', None)
state.setdefault('day', None)
state.setdefault('hour', None)
state.setdefault('minute', None)
state.setdefault('sec', None)
state.setdefault('microsec', None)
self.__dict__ = state
DateTime.__getstate__ = lambda self: self.__dict__.copy()
DateTime.__setstate__ = DateTime__setstate__
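# Illustrative sketch (not part of the generated output): Date, Time and
# DateTime are plain value holders; every field defaults to None and the
# __setstate__ hooks keep unpickling tolerant of missing attributes. The
# sample timestamp below is made up.
def _example_datetime():
    return DateTime(year=2024, month=3, day=15, hour=10, minute=30, sec=0,
                    microsec=0)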
all_structs.append(Value)
Value.thrift_spec = (
None, # 0
(1, TType.I32, 'nVal', NullType, None, 2, ), # 1
(2, TType.BOOL, 'bVal', None, None, 2, ), # 2
(3, TType.I64, 'iVal', None, None, 2, ), # 3
(4, TType.DOUBLE, 'fVal', None, None, 2, ), # 4
(5, TType.STRING, 'sVal', False, None, 2, ), # 5
(6, TType.STRUCT, 'dVal', [Date, Date.thrift_spec, False], None, 2, ), # 6
(7, TType.STRUCT, 'tVal', [Time, Time.thrift_spec, False], None, 2, ), # 7
(8, TType.STRUCT, 'dtVal', [DateTime, DateTime.thrift_spec, False], None, 2, ), # 8
(9, TType.STRUCT, 'vVal', [Vertex, Vertex.thrift_spec, False], None, 2, ), # 9
(10, TType.STRUCT, 'eVal', [Edge, Edge.thrift_spec, False], None, 2, ), # 10
(11, TType.STRUCT, 'pVal', [Path, Path.thrift_spec, False], None, 2, ), # 11
(12, TType.STRUCT, 'lVal', [NList, NList.thrift_spec, False], None, 2, ), # 12
(13, TType.STRUCT, 'mVal', [NMap, NMap.thrift_spec, False], None, 2, ), # 13
(14, TType.STRUCT, 'uVal', [NSet, NSet.thrift_spec, False], None, 2, ), # 14
(15, TType.STRUCT, 'gVal', [DataSet, DataSet.thrift_spec, False], None, 2, ), # 15
(16, TType.STRUCT, 'ggVal', [Geography, Geography.thrift_spec, True], None, 2, ), # 16
)
Value.thrift_struct_annotations = {
"cpp.type": "nebula::Value",
}
Value.thrift_field_annotations = {
9: {
"cpp.ref_type": "unique",
},
10: {
"cpp.ref_type": "unique",
},
11: {
"cpp.ref_type": "unique",
},
12: {
"cpp.ref_type": "unique",
},
13: {
"cpp.ref_type": "unique",
},
14: {
"cpp.ref_type": "unique",
},
15: {
"cpp.ref_type": "unique",
},
16: {
"cpp.ref_type": "unique",
},
}
def Value__init__(self, nVal=None, bVal=None, iVal=None, fVal=None, sVal=None, dVal=None, tVal=None, dtVal=None, vVal=None, eVal=None, pVal=None, lVal=None, mVal=None, uVal=None, gVal=None, ggVal=None,):
self.field = 0
self.value = None
if nVal is not None:
assert self.field == 0 and self.value is None
self.field = 1
self.value = nVal
if bVal is not None:
assert self.field == 0 and self.value is None
self.field = 2
self.value = bVal
if iVal is not None:
assert self.field == 0 and self.value is None
self.field = 3
self.value = iVal
if fVal is not None:
assert self.field == 0 and self.value is None
self.field = 4
self.value = fVal
if sVal is not None:
assert self.field == 0 and self.value is None
self.field = 5
self.value = sVal
if dVal is not None:
assert self.field == 0 and self.value is None
self.field = 6
self.value = dVal
if tVal is not None:
assert self.field == 0 and self.value is None
self.field = 7
self.value = tVal
if dtVal is not None:
assert self.field == 0 and self.value is None
self.field = 8
self.value = dtVal
if vVal is not None:
assert self.field == 0 and self.value is None
self.field = 9
self.value = vVal
if eVal is not None:
assert self.field == 0 and self.value is None
self.field = 10
self.value = eVal
if pVal is not None:
assert self.field == 0 and self.value is None
self.field = 11
self.value = pVal
if lVal is not None:
assert self.field == 0 and self.value is None
self.field = 12
self.value = lVal
if mVal is not None:
assert self.field == 0 and self.value is None
self.field = 13
self.value = mVal
if uVal is not None:
assert self.field == 0 and self.value is None
self.field = 14
self.value = uVal
if gVal is not None:
assert self.field == 0 and self.value is None
self.field = 15
self.value = gVal
if ggVal is not None:
assert self.field == 0 and self.value is None
self.field = 16
self.value = ggVal
Value.__init__ = Value__init__
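# Illustrative sketch (not part of the generated output): Value is a Thrift
# union, so construct it with a single keyword at a time; passing two arms
# would trip the asserts in Value__init__ above. The sample payloads are
# made up.
def _example_values():
    iv = Value(iVal=42)        # arm 3 (iVal)
    sv = Value(sVal=b"hello")  # arm 5 (sVal, carried as bytes on the wire)
    return (iv.field, iv.value), (sv.field, sv.value)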
all_structs.append(NList)
NList.thrift_spec = (
None, # 0
(1, TType.LIST, 'values', (TType.STRUCT,[Value, Value.thrift_spec, True]), None, 2, ), # 1
)
NList.thrift_struct_annotations = {
"cpp.type": "nebula::List",
}
NList.thrift_field_annotations = {
}
def NList__init__(self, values=None,):
self.values = values
NList.__init__ = NList__init__
def NList__setstate__(self, state):
state.setdefault('values', None)
self.__dict__ = state
NList.__getstate__ = lambda self: self.__dict__.copy()
NList.__setstate__ = NList__setstate__
all_structs.append(NMap)
NMap.thrift_spec = (
None, # 0
(1, TType.MAP, 'kvs', (TType.STRING,False,TType.STRUCT,[Value, Value.thrift_spec, True]), None, 2, ), # 1
)
NMap.thrift_struct_annotations = {
"cpp.type": "nebula::Map",
}
NMap.thrift_field_annotations = {
}
def NMap__init__(self, kvs=None,):
self.kvs = kvs
NMap.__init__ = NMap__init__
def NMap__setstate__(self, state):
state.setdefault('kvs', None)
self.__dict__ = state
NMap.__getstate__ = lambda self: self.__dict__.copy()
NMap.__setstate__ = NMap__setstate__
all_structs.append(NSet)
NSet.thrift_spec = (
None, # 0
(1, TType.SET, 'values', (TType.STRUCT,[Value, Value.thrift_spec, True]), None, 2, ), # 1
)
NSet.thrift_struct_annotations = {
"cpp.type": "nebula::Set",
}
NSet.thrift_field_annotations = {
}
def NSet__init__(self, values=None,):
self.values = values
NSet.__init__ = NSet__init__
def NSet__setstate__(self, state):
state.setdefault('values', None)
self.__dict__ = state
NSet.__getstate__ = lambda self: self.__dict__.copy()
NSet.__setstate__ = NSet__setstate__
all_structs.append(Row)
Row.thrift_spec = (
None, # 0
(1, TType.LIST, 'values', (TType.STRUCT,[Value, Value.thrift_spec, True]), None, 2, ), # 1
)
Row.thrift_struct_annotations = {
"cpp.type": "nebula::Row",
}
Row.thrift_field_annotations = {
}
def Row__init__(self, values=None,):
self.values = values
Row.__init__ = Row__init__
def Row__setstate__(self, state):
state.setdefault('values', None)
self.__dict__ = state
Row.__getstate__ = lambda self: self.__dict__.copy()
Row.__setstate__ = Row__setstate__
all_structs.append(DataSet)
DataSet.thrift_spec = (
None, # 0
(1, TType.LIST, 'column_names', (TType.STRING,False), None, 2, ), # 1
(2, TType.LIST, 'rows', (TType.STRUCT,[Row, Row.thrift_spec, False]), None, 2, ), # 2
)
DataSet.thrift_struct_annotations = {
"cpp.type": "nebula::DataSet",
}
DataSet.thrift_field_annotations = {
}
def DataSet__init__(self, column_names=None, rows=None,):
self.column_names = column_names
self.rows = rows
DataSet.__init__ = DataSet__init__
def DataSet__setstate__(self, state):
state.setdefault('column_names', None)
state.setdefault('rows', None)
self.__dict__ = state
DataSet.__getstate__ = lambda self: self.__dict__.copy()
DataSet.__setstate__ = DataSet__setstate__
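# Illustrative sketch (not part of the generated output): a DataSet pairs
# column names with Row objects whose cells are Value unions. The column
# names and cell contents below are hypothetical.
def _example_dataset():
    row = Row(values=[Value(sVal=b"Tom"), Value(iVal=29)])
    return DataSet(column_names=[b"name", b"age"], rows=[row])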
all_structs.append(Coordinate)
Coordinate.thrift_spec = (
None, # 0
(1, TType.DOUBLE, 'x', None, None, 2, ), # 1
(2, TType.DOUBLE, 'y', None, None, 2, ), # 2
)
Coordinate.thrift_struct_annotations = {
"cpp.type": "nebula::Coordinate",
}
Coordinate.thrift_field_annotations = {
}
def Coordinate__init__(self, x=None, y=None,):
self.x = x
self.y = y
Coordinate.__init__ = Coordinate__init__
def Coordinate__setstate__(self, state):
state.setdefault('x', None)
state.setdefault('y', None)
self.__dict__ = state
Coordinate.__getstate__ = lambda self: self.__dict__.copy()
Coordinate.__setstate__ = Coordinate__setstate__
all_structs.append(Point)
Point.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'coord', [Coordinate, Coordinate.thrift_spec, False], None, 2, ), # 1
)
Point.thrift_struct_annotations = {
"cpp.type": "nebula::Point",
}
Point.thrift_field_annotations = {
}
def Point__init__(self, coord=None,):
self.coord = coord
Point.__init__ = Point__init__
def Point__setstate__(self, state):
state.setdefault('coord', None)
self.__dict__ = state
Point.__getstate__ = lambda self: self.__dict__.copy()
Point.__setstate__ = Point__setstate__
all_structs.append(LineString)
LineString.thrift_spec = (
None, # 0
(1, TType.LIST, 'coordList', (TType.STRUCT,[Coordinate, Coordinate.thrift_spec, False]), None, 2, ), # 1
)
LineString.thrift_struct_annotations = {
"cpp.type": "nebula::LineString",
}
LineString.thrift_field_annotations = {
}
def LineString__init__(self, coordList=None,):
self.coordList = coordList
LineString.__init__ = LineString__init__
def LineString__setstate__(self, state):
state.setdefault('coordList', None)
self.__dict__ = state
LineString.__getstate__ = lambda self: self.__dict__.copy()
LineString.__setstate__ = LineString__setstate__
all_structs.append(Polygon)
Polygon.thrift_spec = (
None, # 0
(1, TType.LIST, 'coordListList', (TType.LIST,(TType.STRUCT,[Coordinate, Coordinate.thrift_spec, False])), None, 2, ), # 1
)
Polygon.thrift_struct_annotations = {
"cpp.type": "nebula::Polygon",
}
Polygon.thrift_field_annotations = {
}
def Polygon__init__(self, coordListList=None,):
self.coordListList = coordListList
Polygon.__init__ = Polygon__init__
def Polygon__setstate__(self, state):
state.setdefault('coordListList', None)
self.__dict__ = state
Polygon.__getstate__ = lambda self: self.__dict__.copy()
Polygon.__setstate__ = Polygon__setstate__
all_structs.append(Geography)
Geography.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'ptVal', [Point, Point.thrift_spec, False], None, 2, ), # 1
(2, TType.STRUCT, 'lsVal', [LineString, LineString.thrift_spec, False], None, 2, ), # 2
(3, TType.STRUCT, 'pgVal', [Polygon, Polygon.thrift_spec, False], None, 2, ), # 3
)
Geography.thrift_struct_annotations = {
"cpp.type": "nebula::Geography",
}
Geography.thrift_field_annotations = {
1: {
"cpp.ref_type": "unique",
},
2: {
"cpp.ref_type": "unique",
},
3: {
"cpp.ref_type": "unique",
},
}
def Geography__init__(self, ptVal=None, lsVal=None, pgVal=None,):
self.field = 0
self.value = None
if ptVal is not None:
assert self.field == 0 and self.value is None
self.field = 1
self.value = ptVal
if lsVal is not None:
assert self.field == 0 and self.value is None
self.field = 2
self.value = lsVal
if pgVal is not None:
assert self.field == 0 and self.value is None
self.field = 3
self.value = pgVal
Geography.__init__ = Geography__init__
all_structs.append(Tag)
Tag.thrift_spec = (
None, # 0
(1, TType.STRING, 'name', False, None, 2, ), # 1
(2, TType.MAP, 'props', (TType.STRING,False,TType.STRUCT,[Value, Value.thrift_spec, True]), None, 2, ), # 2
)
Tag.thrift_struct_annotations = {
"cpp.type": "nebula::Tag",
}
Tag.thrift_field_annotations = {
}
def Tag__init__(self, name=None, props=None,):
self.name = name
self.props = props
Tag.__init__ = Tag__init__
def Tag__setstate__(self, state):
state.setdefault('name', None)
state.setdefault('props', None)
self.__dict__ = state
Tag.__getstate__ = lambda self: self.__dict__.copy()
Tag.__setstate__ = Tag__setstate__
all_structs.append(Vertex)
Vertex.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'vid', [Value, Value.thrift_spec, True], None, 2, ), # 1
(2, TType.LIST, 'tags', (TType.STRUCT,[Tag, Tag.thrift_spec, False]), None, 2, ), # 2
)
Vertex.thrift_struct_annotations = {
"cpp.type": "nebula::Vertex",
}
Vertex.thrift_field_annotations = {
}
def Vertex__init__(self, vid=None, tags=None,):
self.vid = vid
self.tags = tags
Vertex.__init__ = Vertex__init__
def Vertex__setstate__(self, state):
state.setdefault('vid', None)
state.setdefault('tags', None)
self.__dict__ = state
Vertex.__getstate__ = lambda self: self.__dict__.copy()
Vertex.__setstate__ = Vertex__setstate__
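# Illustrative sketch (not part of the generated output): a Vertex carries a
# Value vid plus a list of Tag objects whose props map property names to
# Value unions. The vid, tag and property names below are hypothetical.
def _example_vertex():
    tag = Tag(name=b"player", props={b"age": Value(iVal=30)})
    return Vertex(vid=Value(sVal=b"player100"), tags=[tag])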
all_structs.append(Edge)
Edge.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'src', [Value, Value.thrift_spec, True], None, 2, ), # 1
(2, TType.STRUCT, 'dst', [Value, Value.thrift_spec, True], None, 2, ), # 2
(3, TType.I32, 'type', None, None, 2, ), # 3
(4, TType.STRING, 'name', False, None, 2, ), # 4
(5, TType.I64, 'ranking', None, None, 2, ), # 5
(6, TType.MAP, 'props', (TType.STRING,False,TType.STRUCT,[Value, Value.thrift_spec, True]), None, 2, ), # 6
)
Edge.thrift_struct_annotations = {
"cpp.type": "nebula::Edge",
}
Edge.thrift_field_annotations = {
}
def Edge__init__(self, src=None, dst=None, type=None, name=None, ranking=None, props=None,):
self.src = src
self.dst = dst
self.type = type
self.name = name
self.ranking = ranking
self.props = props
Edge.__init__ = Edge__init__
def Edge__setstate__(self, state):
state.setdefault('src', None)
state.setdefault('dst', None)
state.setdefault('type', None)
state.setdefault('name', None)
state.setdefault('ranking', None)
state.setdefault('props', None)
self.__dict__ = state
Edge.__getstate__ = lambda self: self.__dict__.copy()
Edge.__setstate__ = Edge__setstate__
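# Illustrative sketch (not part of the generated output): an Edge joins two
# Value vids with a typed, ranked, named relation. The endpoints, edge name
# and property below are hypothetical.
def _example_edge():
    return Edge(src=Value(sVal=b"player100"), dst=Value(sVal=b"team200"),
                type=1, name=b"serve", ranking=0,
                props={b"start_year": Value(iVal=2010)})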
all_structs.append(Step)
Step.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'dst', [Vertex, Vertex.thrift_spec, False], None, 2, ), # 1
(2, TType.I32, 'type', None, None, 2, ), # 2
(3, TType.STRING, 'name', False, None, 2, ), # 3
(4, TType.I64, 'ranking', None, None, 2, ), # 4
(5, TType.MAP, 'props', (TType.STRING,False,TType.STRUCT,[Value, Value.thrift_spec, True]), None, 2, ), # 5
)
Step.thrift_struct_annotations = {
"cpp.type": "nebula::Step",
}
Step.thrift_field_annotations = {
}
def Step__init__(self, dst=None, type=None, name=None, ranking=None, props=None,):
self.dst = dst
self.type = type
self.name = name
self.ranking = ranking
self.props = props
Step.__init__ = Step__init__
def Step__setstate__(self, state):
state.setdefault('dst', None)
state.setdefault('type', None)
state.setdefault('name', None)
state.setdefault('ranking', None)
state.setdefault('props', None)
self.__dict__ = state
Step.__getstate__ = lambda self: self.__dict__.copy()
Step.__setstate__ = Step__setstate__
all_structs.append(Path)
Path.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'src', [Vertex, Vertex.thrift_spec, False], None, 2, ), # 1
(2, TType.LIST, 'steps', (TType.STRUCT,[Step, Step.thrift_spec, False]), None, 2, ), # 2
)
Path.thrift_struct_annotations = {
"cpp.type": "nebula::Path",
}
Path.thrift_field_annotations = {
}
def Path__init__(self, src=None, steps=None,):
self.src = src
self.steps = steps
Path.__init__ = Path__init__
def Path__setstate__(self, state):
state.setdefault('src', None)
state.setdefault('steps', None)
self.__dict__ = state
Path.__getstate__ = lambda self: self.__dict__.copy()
Path.__setstate__ = Path__setstate__
all_structs.append(HostAddr)
HostAddr.thrift_spec = (
None, # 0
(1, TType.STRING, 'host', True, None, 2, ), # 1
(2, TType.I32, 'port', None, None, 2, ), # 2
)
HostAddr.thrift_struct_annotations = {
"cpp.type": "nebula::HostAddr",
}
HostAddr.thrift_field_annotations = {
}
def HostAddr__init__(self, host=None, port=None,):
self.host = host
self.port = port
HostAddr.__init__ = HostAddr__init__
def HostAddr__setstate__(self, state):
state.setdefault('host', None)
state.setdefault('port', None)
self.__dict__ = state
HostAddr.__getstate__ = lambda self: self.__dict__.copy()
HostAddr.__setstate__ = HostAddr__setstate__
all_structs.append(KeyValue)
KeyValue.thrift_spec = (
None, # 0
(1, TType.STRING, 'key', False, None, 2, ), # 1
(2, TType.STRING, 'value', False, None, 2, ), # 2
)
KeyValue.thrift_struct_annotations = {
"cpp.type": "nebula::KeyValue",
}
KeyValue.thrift_field_annotations = {
}
def KeyValue__init__(self, key=None, value=None,):
self.key = key
self.value = value
KeyValue.__init__ = KeyValue__init__
def KeyValue__setstate__(self, state):
state.setdefault('key', None)
state.setdefault('value', None)
self.__dict__ = state
KeyValue.__getstate__ = lambda self: self.__dict__.copy()
KeyValue.__setstate__ = KeyValue__setstate__
all_structs.append(LogInfo)
LogInfo.thrift_spec = (
None, # 0
(1, TType.I64, 'log_id', None, None, 2, ), # 1
(2, TType.I64, 'term_id', None, None, 2, ), # 2
)
LogInfo.thrift_struct_annotations = {
}
LogInfo.thrift_field_annotations = {
}
def LogInfo__init__(self, log_id=None, term_id=None,):
self.log_id = log_id
self.term_id = term_id
LogInfo.__init__ = LogInfo__init__
def LogInfo__setstate__(self, state):
state.setdefault('log_id', None)
state.setdefault('term_id', None)
self.__dict__ = state
LogInfo.__getstate__ = lambda self: self.__dict__.copy()
LogInfo.__setstate__ = LogInfo__setstate__
all_structs.append(DirInfo)
DirInfo.thrift_spec = (
None, # 0
(1, TType.STRING, 'root', False, None, 2, ), # 1
(2, TType.LIST, 'data', (TType.STRING,False), None, 2, ), # 2
)
DirInfo.thrift_struct_annotations = {
}
DirInfo.thrift_field_annotations = {
}
def DirInfo__init__(self, root=None, data=None,):
self.root = root
self.data = data
DirInfo.__init__ = DirInfo__init__
def DirInfo__setstate__(self, state):
state.setdefault('root', None)
state.setdefault('data', None)
self.__dict__ = state
DirInfo.__getstate__ = lambda self: self.__dict__.copy()
DirInfo.__setstate__ = DirInfo__setstate__
all_structs.append(NodeInfo)
NodeInfo.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'host', [HostAddr, HostAddr.thrift_spec, False], None, 2, ), # 1
(2, TType.STRUCT, 'dir', [DirInfo, DirInfo.thrift_spec, False], None, 2, ), # 2
)
NodeInfo.thrift_struct_annotations = {
}
NodeInfo.thrift_field_annotations = {
}
def NodeInfo__init__(self, host=None, dir=None,):
self.host = host
self.dir = dir
NodeInfo.__init__ = NodeInfo__init__
def NodeInfo__setstate__(self, state):
state.setdefault('host', None)
state.setdefault('dir', None)
self.__dict__ = state
NodeInfo.__getstate__ = lambda self: self.__dict__.copy()
NodeInfo.__setstate__ = NodeInfo__setstate__
all_structs.append(PartitionBackupInfo)
PartitionBackupInfo.thrift_spec = (
None, # 0
(1, TType.MAP, 'info', (TType.I32,None,TType.STRUCT,[LogInfo, LogInfo.thrift_spec, False]), None, 2, ), # 1
)
PartitionBackupInfo.thrift_struct_annotations = {
}
PartitionBackupInfo.thrift_field_annotations = {
}
def PartitionBackupInfo__init__(self, info=None,):
self.info = info
PartitionBackupInfo.__init__ = PartitionBackupInfo__init__
def PartitionBackupInfo__setstate__(self, state):
state.setdefault('info', None)
self.__dict__ = state
PartitionBackupInfo.__getstate__ = lambda self: self.__dict__.copy()
PartitionBackupInfo.__setstate__ = PartitionBackupInfo__setstate__
all_structs.append(CheckpointInfo)
CheckpointInfo.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'partition_info', [PartitionBackupInfo, PartitionBackupInfo.thrift_spec, False], None, 2, ), # 1
(2, TType.STRING, 'path', False, None, 2, ), # 2
)
CheckpointInfo.thrift_struct_annotations = {
}
CheckpointInfo.thrift_field_annotations = {
}
def CheckpointInfo__init__(self, partition_info=None, path=None,):
self.partition_info = partition_info
self.path = path
CheckpointInfo.__init__ = CheckpointInfo__init__
def CheckpointInfo__setstate__(self, state):
state.setdefault('partition_info', None)
state.setdefault('path', None)
self.__dict__ = state
CheckpointInfo.__getstate__ = lambda self: self.__dict__.copy()
CheckpointInfo.__setstate__ = CheckpointInfo__setstate__
all_structs.append(LogEntry)
LogEntry.thrift_spec = (
None, # 0
(1, TType.I64, 'cluster', None, None, 2, ), # 1
(2, TType.STRING, 'log_str', False, None, 2, ), # 2
)
LogEntry.thrift_struct_annotations = {
}
LogEntry.thrift_field_annotations = {
}
def LogEntry__init__(self, cluster=None, log_str=None,):
self.cluster = cluster
self.log_str = log_str
LogEntry.__init__ = LogEntry__init__
def LogEntry__setstate__(self, state):
state.setdefault('cluster', None)
state.setdefault('log_str', None)
self.__dict__ = state
LogEntry.__getstate__ = lambda self: self.__dict__.copy()
LogEntry.__setstate__ = LogEntry__setstate__
fix_spec(all_structs)
del all_structs
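# Illustrative sketch (not part of the generated output): once fix_spec has
# run, any struct can be round-tripped through the pure-Python path of its
# read/write methods. TTransport and TBinaryProtocol are the same modules this
# file already imports; TMemoryBuffer is the in-memory transport from the
# upstream Thrift Python library, and the address below is hypothetical.
def _example_roundtrip():
    buf = TTransport.TMemoryBuffer()
    HostAddr(host='127.0.0.1', port=9669).write(TBinaryProtocol.TBinaryProtocol(buf))
    decoded = HostAddr()
    decoded.read(TBinaryProtocol.TBinaryProtocol(TTransport.TMemoryBuffer(buf.getvalue())))
    return decoded  # decoded now mirrors the HostAddr that was written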
avg_line_length: 35.730481 | max_line_length: 534 | alphanum_fraction: 0.67576

hexsha: ef2a6009b9c8a6c6aebf963dee82ae5599837c33 | size: 594 | ext: py | lang: Python
max_stars_repo_path: morepath/tests/fixtures/template_engine.py | max_stars_repo_name: timgates42/morepath | max_stars_repo_head_hexsha: 09972904229f807da75c75d8825af1495057acdc | max_stars_repo_licenses: ["BSD-3-Clause"] | max_stars_count: 314 | max_stars_repo_stars_event_min_datetime: 2015-01-01T01:42:52.000Z | max_stars_repo_stars_event_max_datetime: 2022-01-07T21:46:15.000Z
max_issues_repo_path: morepath/tests/fixtures/template_engine.py | max_issues_repo_name: timgates42/morepath | max_issues_repo_head_hexsha: 09972904229f807da75c75d8825af1495057acdc | max_issues_repo_licenses: ["BSD-3-Clause"] | max_issues_count: 369 | max_issues_repo_issues_event_min_datetime: 2015-01-02T19:10:40.000Z | max_issues_repo_issues_event_max_datetime: 2021-07-03T04:37:27.000Z
max_forks_repo_path: morepath/tests/fixtures/template_engine.py | max_forks_repo_name: timgates42/morepath | max_forks_repo_head_hexsha: 09972904229f807da75c75d8825af1495057acdc | max_forks_repo_licenses: ["BSD-3-Clause"] | max_forks_count: 37 | max_forks_repo_forks_event_min_datetime: 2015-01-11T09:22:02.000Z | max_forks_repo_forks_event_max_datetime: 2021-07-02T20:48:20.000Z
import os
class FormatTemplate:
def __init__(self, text):
self.text = text
def render(self, **kw):
return self.text.format(**kw)
class FormatLoader:
def __init__(self, template_directories):
self.template_directories = template_directories
def get(self, name):
for template_directory in self.template_directories:
path = os.path.join(template_directory, name)
if not os.path.exists(path):
continue
with open(path) as f:
return FormatTemplate(f.read())
return None
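# Minimal usage sketch (not part of the fixture): FormatTemplate substitutes
# keyword arguments via str.format(), and FormatLoader searches its directory
# list for a template file. The directory and file name below are hypothetical.
def _example_render():
    greeting = FormatTemplate('Hello, {name}!').render(name='world')  # 'Hello, world!'
    loader = FormatLoader(['/tmp/templates'])   # hypothetical search path
    template = loader.get('page.format')        # None unless the file exists
    return greeting, template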
avg_line_length: 24.75 | max_line_length: 60 | alphanum_fraction: 0.617845

hexsha: 6726b0e4cc2e1c161e71ec38896be01735334712 | size: 30,415 | ext: py | lang: Python
max_stars_repo_path: networkx/generators/degree_seq.py | max_stars_repo_name: kalyi/networkx | max_stars_repo_head_hexsha: bf1c7cc9b144767523e5abcf84f949d4223848a0 | max_stars_repo_licenses: ["BSD-3-Clause"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: networkx/generators/degree_seq.py | max_issues_repo_name: kalyi/networkx | max_issues_repo_head_hexsha: bf1c7cc9b144767523e5abcf84f949d4223848a0 | max_issues_repo_licenses: ["BSD-3-Clause"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: networkx/generators/degree_seq.py | max_forks_repo_name: kalyi/networkx | max_forks_repo_head_hexsha: bf1c7cc9b144767523e5abcf84f949d4223848a0 | max_forks_repo_licenses: ["BSD-3-Clause"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
# -*- coding: utf-8 -*-
# Copyright (C) 2004-2018 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
#
# Authors: Aric Hagberg (aric.hagberg@gmail.com)
# Pieter Swart (swart@lanl.gov)
# Dan Schult (dschult@colgate.edu)
# Joel Miller (joel.c.miller.research@gmail.com)
# Nathan Lemons (nlemons@gmail.com)
# Brian Cloteaux (brian.cloteaux@nist.gov)
"""Generate graphs with a given degree sequence or expected degree sequence.
"""
from __future__ import division
import heapq
from itertools import chain
from itertools import combinations
# In Python 3, the function is `zip_longest`, in Python 2 `izip_longest`.
try:
from itertools import zip_longest
except ImportError:
from itertools import izip_longest as zip_longest
import math
from operator import itemgetter
import random
import networkx as nx
from networkx.utils import random_weighted_sample
__all__ = ['configuration_model',
'directed_configuration_model',
'expected_degree_graph',
'havel_hakimi_graph',
'directed_havel_hakimi_graph',
'degree_sequence_tree',
'random_degree_sequence_graph']
chaini = chain.from_iterable
def _to_stublist(degree_sequence):
"""Returns a list of degree-repeated node numbers.
``degree_sequence`` is a list of nonnegative integers representing
the degrees of nodes in a graph.
This function returns a list of node numbers with multiplicities
according to the given degree sequence. For example, if the first
element of ``degree_sequence`` is ``3``, then the first node number,
``0``, will appear at the head of the returned list three times. The
node numbers are assumed to be the numbers zero through
``len(degree_sequence) - 1``.
Examples
--------
>>> degree_sequence = [1, 2, 3]
>>> _to_stublist(degree_sequence)
[0, 1, 1, 2, 2, 2]
If a zero appears in the sequence, that means the node exists but
has degree zero, so that number will be skipped in the returned
list::
>>> degree_sequence = [2, 0, 1]
>>> _to_stublist(degree_sequence)
[0, 0, 2]
"""
return list(chaini([n] * d for n, d in enumerate(degree_sequence)))
def _configuration_model(deg_sequence, create_using, directed=False,
in_deg_sequence=None, seed=None):
"""Helper function for generating either undirected or directed
configuration model graphs.
``deg_sequence`` is a list of nonnegative integers representing the
degree of the node whose label is the index of the list element.
``create_using`` see :func:`~networkx.empty_graph`.
``directed`` and ``in_deg_sequence`` are required if you want the
returned graph to be generated using the directed configuration
model algorithm. If ``directed`` is ``False``, then ``deg_sequence``
is interpreted as the degree sequence of an undirected graph and
``in_deg_sequence`` is ignored. Otherwise, if ``directed`` is
``True``, then ``deg_sequence`` is interpreted as the out-degree
sequence and ``in_deg_sequence`` as the in-degree sequence of a
directed graph.
.. note::
``deg_sequence`` and ``in_deg_sequence`` need not be the same
length.
``seed`` is the seed for the random number generator.
This function returns a graph, directed if and only if ``directed``
is ``True``, generated according to the configuration model
algorithm. For more information on the algorithm, see the
:func:`configuration_model` or :func:`directed_configuration_model`
functions.
"""
if seed is not None:
random.seed(seed)
n = len(deg_sequence)
G = nx.empty_graph(n, create_using)
# If empty, return the null graph immediately.
if n == 0:
return G
# Build a list of available degree-repeated nodes. For example,
# for degree sequence [3, 2, 1, 1, 1], the "stub list" is
    # initially [0, 0, 0, 1, 1, 2, 3, 4], that is, node 0 has degree
    # 3 and thus is repeated 3 times, etc.
#
# Also, shuffle the stub list in order to get a random sequence of
# node pairs.
if directed:
pairs = zip_longest(deg_sequence, in_deg_sequence, fillvalue=0)
# Unzip the list of pairs into a pair of lists.
out_deg, in_deg = zip(*pairs)
out_stublist = _to_stublist(out_deg)
in_stublist = _to_stublist(in_deg)
random.shuffle(out_stublist)
random.shuffle(in_stublist)
else:
stublist = _to_stublist(deg_sequence)
# Choose a random balanced bipartition of the stublist, which
# gives a random pairing of nodes. In this implementation, we
# shuffle the list and then split it in half.
n = len(stublist)
half = n // 2
random.shuffle(stublist)
out_stublist, in_stublist = stublist[:half], stublist[half:]
G.add_edges_from(zip(out_stublist, in_stublist))
return G
def configuration_model(deg_sequence, create_using=None, seed=None):
"""Return a random graph with the given degree sequence.
The configuration model generates a random pseudograph (graph with
parallel edges and self loops) by randomly assigning edges to
match the given degree sequence.
Parameters
----------
deg_sequence : list of nonnegative integers
Each list entry corresponds to the degree of a node.
create_using : NetworkX graph constructor, optional (default MultiGraph)
Graph type to create. If graph instance, then cleared before populated.
seed : hashable object, optional
Seed for random number generator.
Returns
-------
G : MultiGraph
A graph with the specified degree sequence.
Nodes are labeled starting at 0 with an index
corresponding to the position in deg_sequence.
Raises
------
NetworkXError
If the degree sequence does not have an even sum.
See Also
--------
is_graphical
Notes
-----
As described by Newman [1]_.
A non-graphical degree sequence (not realizable by some simple
graph) is allowed since this function returns graphs with self
loops and parallel edges. An exception is raised if the degree
sequence does not have an even sum.
This configuration model construction process can lead to
duplicate edges and loops. You can remove the self-loops and
parallel edges (see below) which will likely result in a graph
that doesn't have the exact degree sequence specified.
The density of self-loops and parallel edges tends to decrease as
the number of nodes increases. However, typically the number of
self-loops will approach a Poisson distribution with a nonzero mean,
and similarly for the number of parallel edges. Consider a node
with *k* stubs. The probability of being joined to another stub of
the same node is basically (*k* - *1*) / *N*, where *k* is the
degree and *N* is the number of nodes. So the probability of a
self-loop scales like *c* / *N* for some constant *c*. As *N* grows,
this means we expect *c* self-loops. Similarly for parallel edges.
References
----------
.. [1] M.E.J. Newman, "The structure and function of complex networks",
SIAM REVIEW 45-2, pp 167-256, 2003.
Examples
--------
You can create a degree sequence following a particular distribution
by using the one of the distribution functions in
:mod:`~networkx.utils.random_sequence` (or one of your own). For
example, to create an undirected multigraph on one hundred nodes
with degree sequence chosen from the power law distribution:
>>> sequence = nx.random_powerlaw_tree_sequence(100, tries=5000)
>>> G = nx.configuration_model(sequence)
>>> len(G)
100
>>> actual_degrees = [d for v, d in G.degree()]
>>> actual_degrees == sequence
True
The returned graph is a multigraph, which may have parallel
edges. To remove any parallel edges from the returned graph:
>>> G = nx.Graph(G)
Similarly, to remove self-loops:
>>> G.remove_edges_from(nx.selfloop_edges(G))
"""
if sum(deg_sequence) % 2 != 0:
msg = 'Invalid degree sequence: sum of degrees must be even, not odd'
raise nx.NetworkXError(msg)
G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
if G.is_directed():
raise nx.NetworkXNotImplemented('not implemented for directed graphs')
G = _configuration_model(deg_sequence, G, seed=seed)
return G
def directed_configuration_model(in_degree_sequence,
out_degree_sequence,
create_using=None, seed=None):
"""Return a directed_random graph with the given degree sequences.
The configuration model generates a random directed pseudograph
(graph with parallel edges and self loops) by randomly assigning
edges to match the given degree sequences.
Parameters
----------
in_degree_sequence : list of nonnegative integers
Each list entry corresponds to the in-degree of a node.
out_degree_sequence : list of nonnegative integers
Each list entry corresponds to the out-degree of a node.
create_using : NetworkX graph constructor, optional (default MultiDiGraph)
Graph type to create. If graph instance, then cleared before populated.
seed : hashable object, optional
Seed for random number generator.
Returns
-------
G : MultiDiGraph
A graph with the specified degree sequences.
Nodes are labeled starting at 0 with an index
corresponding to the position in deg_sequence.
Raises
------
NetworkXError
If the degree sequences do not have the same sum.
See Also
--------
configuration_model
Notes
-----
Algorithm as described by Newman [1]_.
A non-graphical degree sequence (not realizable by some simple
graph) is allowed since this function returns graphs with self
loops and parallel edges. An exception is raised if the degree
sequences does not have the same sum.
This configuration model construction process can lead to
duplicate edges and loops. You can remove the self-loops and
parallel edges (see below) which will likely result in a graph
that doesn't have the exact degree sequence specified. This
"finite-size effect" decreases as the size of the graph increases.
References
----------
.. [1] Newman, M. E. J. and Strogatz, S. H. and Watts, D. J.
Random graphs with arbitrary degree distributions and their applications
Phys. Rev. E, 64, 026118 (2001)
Examples
--------
One can modify the in- and out-degree sequences from an existing
directed graph in order to create a new directed graph. For example,
here we modify the directed path graph:
>>> D = nx.DiGraph([(0, 1), (1, 2), (2, 3)])
>>> din = list(d for n, d in D.in_degree())
>>> dout = list(d for n, d in D.out_degree())
>>> din.append(1)
>>> dout[0] = 2
    >>> # We now expect an edge from node 0 to a new node, node 4.
... D = nx.directed_configuration_model(din, dout)
The returned graph is a directed multigraph, which may have parallel
edges. To remove any parallel edges from the returned graph:
>>> D = nx.DiGraph(D)
Similarly, to remove self-loops:
>>> D.remove_edges_from(nx.selfloop_edges(D))
"""
if sum(in_degree_sequence) != sum(out_degree_sequence):
msg = 'Invalid degree sequences: sequences must have equal sums'
raise nx.NetworkXError(msg)
if create_using is None:
create_using = nx.MultiDiGraph
G = _configuration_model(out_degree_sequence, create_using, directed=True,
in_deg_sequence=in_degree_sequence, seed=seed)
name = "directed configuration_model {} nodes {} edges"
return G
def expected_degree_graph(w, seed=None, selfloops=True):
r"""Return a random graph with given expected degrees.
Given a sequence of expected degrees $W=(w_0,w_1,\ldots,w_{n-1})$
of length $n$ this algorithm assigns an edge between node $u$ and
node $v$ with probability
.. math::
p_{uv} = \frac{w_u w_v}{\sum_k w_k} .
Parameters
----------
w : list
The list of expected degrees.
selfloops: bool (default=True)
Set to False to remove the possibility of self-loop edges.
seed : hashable object, optional
The seed for the random number generator.
Returns
-------
Graph
Examples
--------
>>> z=[10 for i in range(100)]
>>> G=nx.expected_degree_graph(z)
Notes
-----
The nodes have integer labels corresponding to index of expected degrees
input sequence.
The complexity of this algorithm is $\mathcal{O}(n+m)$ where $n$ is the
number of nodes and $m$ is the expected number of edges.
The model in [1]_ includes the possibility of self-loop edges.
Set selfloops=False to produce a graph without self loops.
For finite graphs this model doesn't produce exactly the given
expected degree sequence. Instead the expected degrees are as
follows.
For the case without self loops (selfloops=False),
.. math::
E[deg(u)] = \sum_{v \ne u} p_{uv}
= w_u \left( 1 - \frac{w_u}{\sum_k w_k} \right) .
NetworkX uses the standard convention that a self-loop edge counts 2
in the degree of a node, so with self loops (selfloops=True),
.. math::
E[deg(u)] = \sum_{v \ne u} p_{uv} + 2 p_{uu}
= w_u \left( 1 + \frac{w_u}{\sum_k w_k} \right) .
References
----------
.. [1] Fan Chung and L. Lu, Connected components in random graphs with
given expected degree sequences, Ann. Combinatorics, 6,
pp. 125-145, 2002.
.. [2] Joel Miller and Aric Hagberg,
Efficient generation of networks with given expected degrees,
in Algorithms and Models for the Web-Graph (WAW 2011),
Alan Frieze, Paul Horn, and Paweł Prałat (Eds), LNCS 6732,
pp. 115-126, 2011.
"""
n = len(w)
G = nx.empty_graph(n)
    # If there are no nodes or no edges in the graph, return the empty graph.
if n == 0 or max(w) == 0:
return G
if seed is not None:
random.seed(seed)
rho = 1 / sum(w)
# Sort the weights in decreasing order. The original order of the
# weights dictates the order of the (integer) node labels, so we
# need to remember the permutation applied in the sorting.
order = sorted(enumerate(w), key=itemgetter(1), reverse=True)
mapping = {c: u for c, (u, v) in enumerate(order)}
seq = [v for u, v in order]
last = n
if not selfloops:
last -= 1
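    # The nested loops below implement the skipping technique of Miller and
    # Hagberg (reference [2] above): rather than testing every candidate
    # neighbour v, the inner loop jumps ahead by a geometrically distributed
    # number of skipped candidates, floor(log(r) / log(1 - p)), and then
    # accepts the landing candidate with probability q / p, where p is an
    # upper bound carried over from the previous (larger-weight) candidate
    # and q is the true edge probability. This is what gives the O(n + m)
    # expected running time quoted in the docstring.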
for u in range(last):
v = u
if not selfloops:
v += 1
factor = seq[u] * rho
p = min(seq[v] * factor, 1)
while v < n and p > 0:
if p != 1:
r = random.random()
v += int(math.floor(math.log(r, 1 - p)))
if v < n:
q = min(seq[v] * factor, 1)
if random.random() < q / p:
G.add_edge(mapping[u], mapping[v])
v += 1
p = q
return G
def havel_hakimi_graph(deg_sequence, create_using=None):
"""Return a simple graph with given degree sequence constructed
using the Havel-Hakimi algorithm.
Parameters
----------
deg_sequence: list of integers
Each integer corresponds to the degree of a node (need not be sorted).
create_using : NetworkX graph constructor, optional (default=nx.Graph)
Graph type to create. If graph instance, then cleared before populated.
Directed graphs are not allowed.
Raises
------
NetworkXException
For a non-graphical degree sequence (i.e. one
not realizable by some simple graph).
Notes
-----
The Havel-Hakimi algorithm constructs a simple graph by
successively connecting the node of highest degree to other nodes
of highest degree, resorting remaining nodes by degree, and
repeating the process. The resulting graph has a high
    degree-assortativity. Nodes are labeled 0, ..., len(deg_sequence) - 1,
    corresponding to their position in deg_sequence.
The basic algorithm is from Hakimi [1]_ and was generalized by
Kleitman and Wang [2]_.
References
----------
.. [1] Hakimi S., On Realizability of a Set of Integers as
Degrees of the Vertices of a Linear Graph. I,
Journal of SIAM, 10(3), pp. 496-506 (1962)
.. [2] Kleitman D.J. and Wang D.L.
Algorithms for Constructing Graphs and Digraphs with Given Valences
and Factors Discrete Mathematics, 6(1), pp. 79-88 (1973)
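    Examples
    --------
    A small illustrative call (any graphical degree sequence works):
    >>> G = nx.havel_hakimi_graph([3, 3, 2, 2, 1, 1])
    >>> sorted(d for n, d in G.degree())
    [1, 1, 2, 2, 3, 3]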
"""
if not nx.is_graphical(deg_sequence):
raise nx.NetworkXError('Invalid degree sequence')
p = len(deg_sequence)
G = nx.empty_graph(p, create_using)
if G.is_directed():
raise nx.NetworkXError("Directed graphs are not supported")
num_degs = [[] for i in range(p)]
dmax, dsum, n = 0, 0, 0
for d in deg_sequence:
# Process only the non-zero integers
if d > 0:
num_degs[d].append(n)
dmax, dsum, n = max(dmax, d), dsum + d, n + 1
# Return graph if no edges
if n == 0:
return G
modstubs = [(0, 0)] * (dmax + 1)
# Successively reduce degree sequence by removing the maximum degree
while n > 0:
# Retrieve the maximum degree in the sequence
while len(num_degs[dmax]) == 0:
dmax -= 1
# If there are not enough stubs to connect to, then the sequence is
# not graphical
if dmax > n - 1:
raise nx.NetworkXError('Non-graphical integer sequence')
# Remove largest stub in list
source = num_degs[dmax].pop()
n -= 1
# Reduce the next dmax largest stubs
mslen = 0
k = dmax
for i in range(dmax):
while len(num_degs[k]) == 0:
k -= 1
target = num_degs[k].pop()
G.add_edge(source, target)
n -= 1
if k > 1:
modstubs[mslen] = (k - 1, target)
mslen += 1
# Add back to the list any nonzero stubs that were removed
for i in range(mslen):
(stubval, stubtarget) = modstubs[i]
num_degs[stubval].append(stubtarget)
n += 1
return G
def directed_havel_hakimi_graph(in_deg_sequence,
out_deg_sequence,
create_using=None):
"""Return a directed graph with the given degree sequences.
Parameters
----------
in_deg_sequence : list of integers
Each list entry corresponds to the in-degree of a node.
out_deg_sequence : list of integers
Each list entry corresponds to the out-degree of a node.
create_using : NetworkX graph constructor, optional (default DiGraph)
Graph type to create. If graph instance, then cleared before populated.
Returns
-------
G : DiGraph
A graph with the specified degree sequences.
Nodes are labeled starting at 0 with an index
corresponding to the position in deg_sequence
Raises
------
NetworkXError
If the degree sequences are not digraphical.
See Also
--------
configuration_model
Notes
-----
Algorithm as described by Kleitman and Wang [1]_.
References
----------
.. [1] D.J. Kleitman and D.L. Wang
Algorithms for Constructing Graphs and Digraphs with Given Valences
and Factors Discrete Mathematics, 6(1), pp. 79-88 (1973)
"""
assert(nx.utils.is_list_of_ints(in_deg_sequence))
assert(nx.utils.is_list_of_ints(out_deg_sequence))
# Process the sequences and form two heaps to store degree pairs with
# either zero or nonzero out degrees
sumin, sumout = 0, 0
nin, nout = len(in_deg_sequence), len(out_deg_sequence)
maxn = max(nin, nout)
G = nx.empty_graph(maxn, create_using, default=nx.DiGraph)
if maxn == 0:
return G
maxin = 0
stubheap, zeroheap = [], []
for n in range(maxn):
in_deg, out_deg = 0, 0
if n < nout:
out_deg = out_deg_sequence[n]
if n < nin:
in_deg = in_deg_sequence[n]
if in_deg < 0 or out_deg < 0:
raise nx.NetworkXError(
                'Invalid degree sequences. Sequence values must be non-negative.')
sumin, sumout, maxin = sumin + in_deg, sumout + out_deg, max(maxin, in_deg)
if in_deg > 0:
stubheap.append((-1 * out_deg, -1 * in_deg, n))
elif out_deg > 0:
zeroheap.append((-1 * out_deg, n))
if sumin != sumout:
raise nx.NetworkXError(
'Invalid degree sequences. Sequences must have equal sums.')
heapq.heapify(stubheap)
heapq.heapify(zeroheap)
modstubs = [(0, 0, 0)] * (maxin + 1)
# Successively reduce degree sequence by removing the maximum
while stubheap:
# Remove first value in the sequence with a non-zero in degree
(freeout, freein, target) = heapq.heappop(stubheap)
freein *= -1
if freein > len(stubheap) + len(zeroheap):
raise nx.NetworkXError('Non-digraphical integer sequence')
# Attach arcs from the nodes with the most stubs
mslen = 0
for i in range(freein):
if zeroheap and (not stubheap or stubheap[0][0] > zeroheap[0][0]):
(stubout, stubsource) = heapq.heappop(zeroheap)
stubin = 0
else:
(stubout, stubin, stubsource) = heapq.heappop(stubheap)
if stubout == 0:
raise nx.NetworkXError('Non-digraphical integer sequence')
G.add_edge(stubsource, target)
# Check if source is now totally connected
if stubout + 1 < 0 or stubin < 0:
modstubs[mslen] = (stubout + 1, stubin, stubsource)
mslen += 1
# Add the nodes back to the heaps that still have available stubs
for i in range(mslen):
stub = modstubs[i]
if stub[1] < 0:
heapq.heappush(stubheap, stub)
else:
heapq.heappush(zeroheap, (stub[0], stub[2]))
if freeout < 0:
heapq.heappush(zeroheap, (freeout, target))
return G
def degree_sequence_tree(deg_sequence, create_using=None):
"""Make a tree for the given degree sequence.
A tree has #nodes-#edges=1 so
the degree sequence must have
len(deg_sequence)-sum(deg_sequence)/2=1
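    For example, [2, 1, 1] is a valid tree degree sequence
    (3 nodes - 4/2 edges = 1) and yields a path on three nodes,
    whereas [2, 2, 2] is rejected because it describes a cycle.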
"""
# The sum of the degree sequence must be even (for any undirected graph).
degree_sum = sum(deg_sequence)
if degree_sum % 2 != 0:
msg = 'Invalid degree sequence: sum of degrees must be even, not odd'
raise nx.NetworkXError(msg)
if len(deg_sequence) - degree_sum // 2 != 1:
msg = ('Invalid degree sequence: tree must have number of nodes equal'
               ' to one greater than the number of edges')
raise nx.NetworkXError(msg)
G = nx.empty_graph(0, create_using)
if G.is_directed():
raise nx.NetworkXError("Directed Graph not supported")
# Sort all degrees greater than 1 in decreasing order.
#
# TODO Does this need to be sorted in reverse order?
deg = sorted((s for s in deg_sequence if s > 1), reverse=True)
# make path graph as backbone
n = len(deg) + 2
nx.add_path(G, range(n))
last = n
# add the leaves
for source in range(1, n - 1):
nedges = deg.pop() - 2
for target in range(last, last + nedges):
G.add_edge(source, target)
last += nedges
# in case we added one too many
if len(G) > len(deg_sequence):
G.remove_node(0)
return G
def random_degree_sequence_graph(sequence, seed=None, tries=10):
r"""Return a simple random graph with the given degree sequence.
If the maximum degree $d_m$ in the sequence is $O(m^{1/4})$ then the
algorithm produces almost uniform random graphs in $O(m d_m)$ time
where $m$ is the number of edges.
Parameters
----------
sequence : list of integers
Sequence of degrees
seed : hashable object, optional
Seed for random number generator
tries : int, optional
Maximum number of tries to create a graph
Returns
-------
G : Graph
A graph with the specified degree sequence.
Nodes are labeled starting at 0 with an index
corresponding to the position in the sequence.
Raises
------
NetworkXUnfeasible
If the degree sequence is not graphical.
NetworkXError
If a graph is not produced in specified number of tries
See Also
--------
is_graphical, configuration_model
Notes
-----
The generator algorithm [1]_ is not guaranteed to produce a graph.
References
----------
.. [1] Moshen Bayati, Jeong Han Kim, and Amin Saberi,
A sequential algorithm for generating random graphs.
Algorithmica, Volume 58, Number 4, 860-910,
DOI: 10.1007/s00453-009-9340-1
Examples
--------
>>> sequence = [1, 2, 2, 3]
>>> G = nx.random_degree_sequence_graph(sequence)
>>> sorted(d for n, d in G.degree())
[1, 2, 2, 3]
"""
DSRG = DegreeSequenceRandomGraph(sequence, seed=seed)
for try_n in range(tries):
try:
return DSRG.generate()
except nx.NetworkXUnfeasible:
pass
raise nx.NetworkXError('failed to generate graph in %d tries' % tries)
class DegreeSequenceRandomGraph(object):
# class to generate random graphs with a given degree sequence
# use random_degree_sequence_graph()
def __init__(self, degree, seed=None):
if not nx.is_graphical(degree):
raise nx.NetworkXUnfeasible('degree sequence is not graphical')
if seed is not None:
random.seed(seed)
self.degree = list(degree)
# node labels are integers 0,...,n-1
self.m = sum(self.degree) / 2.0 # number of edges
try:
self.dmax = max(self.degree) # maximum degree
except ValueError:
self.dmax = 0
def generate(self):
# remaining_degree is mapping from int->remaining degree
self.remaining_degree = dict(enumerate(self.degree))
# add all nodes to make sure we get isolated nodes
self.graph = nx.Graph()
self.graph.add_nodes_from(self.remaining_degree)
# remove zero degree nodes
for n, d in list(self.remaining_degree.items()):
if d == 0:
del self.remaining_degree[n]
if len(self.remaining_degree) > 0:
# build graph in three phases according to how many unmatched edges
self.phase1()
self.phase2()
self.phase3()
return self.graph
def update_remaining(self, u, v, aux_graph=None):
# decrement remaining nodes, modify auxiliary graph if in phase3
if aux_graph is not None:
# remove edges from auxiliary graph
aux_graph.remove_edge(u, v)
if self.remaining_degree[u] == 1:
del self.remaining_degree[u]
if aux_graph is not None:
aux_graph.remove_node(u)
else:
self.remaining_degree[u] -= 1
if self.remaining_degree[v] == 1:
del self.remaining_degree[v]
if aux_graph is not None:
aux_graph.remove_node(v)
else:
self.remaining_degree[v] -= 1
def p(self, u, v):
# degree probability
return 1 - self.degree[u] * self.degree[v] / (4.0 * self.m)
def q(self, u, v):
# remaining degree probability
norm = float(max(self.remaining_degree.values()))**2
return self.remaining_degree[u] * self.remaining_degree[v] / norm
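    # Note on the two probabilities above (sequential algorithm of Bayati,
    # Kim and Saberi): p(u, v) damps edges between nodes of high original
    # degree so that sampled graphs approach the uniform distribution over
    # realizations of the degree sequence, while q(u, v) is only used for
    # rejection sampling so that candidate pairs are accepted roughly in
    # proportion to the product of their remaining degrees.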
def suitable_edge(self):
"""Returns True if and only if an arbitrary remaining node can
potentially be joined with some other remaining node.
"""
nodes = iter(self.remaining_degree)
u = next(nodes)
return any(v not in self.graph[u] for v in nodes)
def phase1(self):
# choose node pairs from (degree) weighted distribution
while sum(self.remaining_degree.values()) >= 2 * self.dmax**2:
u, v = sorted(random_weighted_sample(self.remaining_degree, 2))
if self.graph.has_edge(u, v):
continue
if random.random() < self.p(u, v): # accept edge
self.graph.add_edge(u, v)
self.update_remaining(u, v)
def phase2(self):
# choose remaining nodes uniformly at random and use rejection sampling
while len(self.remaining_degree) >= 2 * self.dmax:
norm = float(max(self.remaining_degree.values()))**2
while True:
                u, v = sorted(random.sample(list(self.remaining_degree), 2))
if self.graph.has_edge(u, v):
continue
if random.random() < self.q(u, v):
break
if random.random() < self.p(u, v): # accept edge
self.graph.add_edge(u, v)
self.update_remaining(u, v)
def phase3(self):
# build potential remaining edges and choose with rejection sampling
potential_edges = combinations(self.remaining_degree, 2)
# build auxiliary graph of potential edges not already in graph
H = nx.Graph([(u, v) for (u, v) in potential_edges
if not self.graph.has_edge(u, v)])
while self.remaining_degree:
if not self.suitable_edge():
raise nx.NetworkXUnfeasible('no suitable edges left')
while True:
u, v = sorted(random.choice(list(H.edges())))
if random.random() < self.q(u, v):
break
if random.random() < self.p(u, v): # accept edge
self.graph.add_edge(u, v)
self.update_remaining(u, v, aux_graph=H)
| 35.080738
| 83
| 0.62913
|
e23cfa5de052cf9f3b182753a7a94b0428029ba1
| 1,198
|
py
|
Python
|
v2.5.7/toontown/hood/MMHoodDataAI.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 4
|
2019-07-01T15:46:43.000Z
|
2021-07-23T16:26:48.000Z
|
v2.5.7/toontown/hood/MMHoodDataAI.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 1
|
2019-06-29T03:40:05.000Z
|
2021-06-13T01:15:16.000Z
|
v2.5.7/toontown/hood/MMHoodDataAI.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 4
|
2019-07-28T21:18:46.000Z
|
2021-02-25T06:37:25.000Z
|
from direct.directnotify import DirectNotifyGlobal
import HoodDataAI
from toontown.toonbase import ToontownGlobals
from toontown.safezone import DistributedTrolleyAI
from toontown.classicchars import DistributedMinnieAI, DistributedWitchMinnieAI, DistributedPlutoAI
from toontown.safezone import DistributedMMPianoAI
class MMHoodDataAI(HoodDataAI.HoodDataAI):
notify = DirectNotifyGlobal.directNotify.newCategory('SZHoodAI')
def __init__(self, air, zoneId=None):
hoodId = ToontownGlobals.MinniesMelodyland
        if zoneId is None:
zoneId = hoodId
self.cCharClass = DistributedMinnieAI.DistributedMinnieAI
self.cCharClassAF = DistributedPlutoAI.DistributedPlutoAI
self.cCharClassHW = DistributedWitchMinnieAI.DistributedWitchMinnieAI
self.cCharConfigVar = 'want-pluto'
HoodDataAI.HoodDataAI.__init__(self, air, zoneId, hoodId)
return
def startup(self):
self.notify.info("Creating zone... Minnie's Melodyland")
HoodDataAI.HoodDataAI.startup(self)
trolley = DistributedTrolleyAI.DistributedTrolleyAI(self.air)
trolley.generateWithRequired(self.zoneId)
self.addDistObj(trolley)
| 44.37037
| 99
| 0.764608
|
d2fe353527f7ea85910d614838ed5c84213504c8
| 2,381
|
py
|
Python
|
src/finmag/sim/magnetisation_patterns_test.py
|
davidcortesortuno/finmag
|
9ac0268d2c0e45faf1284cee52a73525aa589e2b
|
[
"BSL-1.0"
] | 10
|
2018-03-24T07:43:17.000Z
|
2022-03-26T10:42:27.000Z
|
src/finmag/sim/magnetisation_patterns_test.py
|
davidcortesortuno/finmag
|
9ac0268d2c0e45faf1284cee52a73525aa589e2b
|
[
"BSL-1.0"
] | 21
|
2018-03-26T15:08:53.000Z
|
2021-07-10T16:11:14.000Z
|
src/finmag/sim/magnetisation_patterns_test.py
|
davidcortesortuno/finmag
|
9ac0268d2c0e45faf1284cee52a73525aa589e2b
|
[
"BSL-1.0"
] | 7
|
2018-04-09T11:50:48.000Z
|
2021-06-10T09:23:25.000Z
|
import numpy as np
import finmag
from finmag.sim.magnetisation_patterns import *
from finmag.util.meshes import cylinder
def test_vortex_functions():
"""
Testing for correct polarity and 'handiness' of the two vortex functions,
vortex_simple() and vortex_feldtkeller()
"""
mesh = cylinder(10, 1, 3)
coords = mesh.coordinates()
def functions(hand, p):
f_simple = vortex_simple(r=10.1, center=(0, 0, 1),
right_handed=hand, polarity=p)
f_feldtkeller = vortex_feldtkeller(beta=15, center=(0, 0, 1),
right_handed=hand, polarity=p)
return [f_simple, f_feldtkeller]
    # The polarity test evaluates the function at the mesh coordinates and
    # checks that the sign of the resulting z-component matches the
    # user-specified polarity.
def polarity_test(func, coords, p):
assert(np.alltrue([(p * func(coord)[2] > 0) for coord in coords]))
# This function finds cross product of radius vector and the evaluated
# function vector, rxm. The z- component of this will be:
# - negative for a clockwise vortex
# - positive for a counter-clockwise vortex
# When (rxm)[2] is multiplied by the polarity, p, (rxm)[2] * p is:
# - negative for a left-handed state
# - positive for a right-handed state
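    # A quick check of that sign convention with illustrative numbers: at
    # r = (10, 0, 0) a counter-clockwise vortex has m approximately
    # (0, 1, mz), so (r x m)[2] = 10*1 - 0*0 = 10 > 0; with p = +1 this is
    # the right-handed case, and reversing either the circulation or the
    # polarity flips the sign.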
def handiness_test(func, coords, hand, p):
r = coords
m = [func(coord) for coord in coords]
cross_product = np.cross(r, m)
if hand is True:
assert(np.alltrue((cross_product[:, 2] * p) > 0))
elif hand is False:
assert(np.alltrue((cross_product[:, 2] * p) < 0))
# run the tests
for hand in [True, False]:
for p in [-1, 1]:
funcs = functions(hand, p)
for func in funcs:
polarity_test(func, coords, p)
handiness_test(func, coords, hand, p)
# Final sanity check: f_simple should yield zero z-coordinate
# outside the vortex core radius, and the magnetisation should
# curl around the center.
f_simple = vortex_simple(r=20, center=(0, 0, 1),
right_handed=True, polarity=1)
assert(np.allclose(f_simple((21, 0, 0)), [0, 1, 0]))
assert(np.allclose(f_simple((-16, 16, 20)),
[-1. / np.sqrt(2), -1. / np.sqrt(2), 0]))
| 38.403226
| 78
| 0.604368
|
d2d978c9ff8c2ed7d8365aa17c2a6124e1027373
| 9,716
|
py
|
Python
|
doc/conf.py
|
xunzi/nagiosplugin
|
050651a39c3ad986367bf93d817fa62a47c15de2
|
[
"ZPL-2.1"
] | 18
|
2020-01-22T15:38:51.000Z
|
2022-03-01T16:17:15.000Z
|
doc/conf.py
|
xunzi/nagiosplugin
|
050651a39c3ad986367bf93d817fa62a47c15de2
|
[
"ZPL-2.1"
] | 30
|
2019-11-07T01:14:07.000Z
|
2022-02-12T16:24:53.000Z
|
doc/conf.py
|
xunzi/nagiosplugin
|
050651a39c3ad986367bf93d817fa62a47c15de2
|
[
"ZPL-2.1"
] | 12
|
2019-11-09T00:24:37.000Z
|
2022-02-19T16:43:58.000Z
|
# -*- coding: utf-8 -*-
#
# nagiosplugin documentation build configuration file, created by
# sphinx-quickstart on Tue Aug 17 21:49:47 2010.
#
# This file is execfile()d with the current directory set to its containing
# dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import re
from distutils.util import convert_path
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.1'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.ifconfig']
# Autodoc configuration
autodoc_default_flags = ['members']
autodoc_member_order = 'groupwise'
autoclass_content = 'both'
primary_domain = 'py'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'nagiosplugin'
copyright = 'Flying Circus Internet Operations GmbH'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
ver_ns = {}
ver_path = convert_path('../nagiosplugin/version.py')
with open(ver_path) as ver_file:
exec(ver_file.read(), ver_ns)
release = ver_ns['__VERSION__']
# The short X.Y version.
version = re.sub(r'-.*|[abc]\d+|rc\d+|.dev\d+', '', release)
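# For illustration (hypothetical values): release strings such as
# '1.2.3.dev4', '1.2.3rc1' or '1.2.3b2' are all reduced to '1.2.3' by the
# substitution above.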
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%Y-%m-%d'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
default_role = 'py:obj'
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
html_short_title = 'nagiosplugin v%s' % version
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'nagiosplugindoc'
# -- Options for LaTeX output -------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'a4paper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples (source start
# file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'nagiosplugin.tex', u'nagiosplugin Documentation',
u'Flying Circus Internet Operations GmbH', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output -------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'nagiosplugin', 'nagiosplugin Documentation',
['Flying Circus Internet Operations GmbH'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'nagiosplugin', u'nagiosplugin Documentation',
'Flying Circus Internet Operations GmbH', 'nagiosplugin',
'Class library for writing Nagios (Icinga) plugins',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# -- Options for Epub output --------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'nagiosplugin'
epub_author = 'Flying Circus Internet Operations GmbH'
epub_publisher = 'Flying Circus Internet Operations GmbH'
epub_copyright = u'Flying Circus Internet Operations GmbH'
# The language of the text. It defaults to the language option
# or en if the language is not set.
# epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
# epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
# epub_identifier = ''
# A unique identification for the text.
# epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
# epub_cover = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
# epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
# epub_post_files = []
# A list of files that should not be packed into the epub file.
# epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
# epub_tocdepth = 3
# Allow duplicate toc entries.
# epub_tocdup = True
| 32.386667
| 79
| 0.714492
|
b8730c82babdf2d3a9f427362c5e2e4798281b1a
| 66
|
py
|
Python
|
globals.py
|
JahnChoi/market-analysis-tool
|
2dee346460116ad08f4eca4079b095966fd21505
|
[
"MIT"
] | null | null | null |
globals.py
|
JahnChoi/market-analysis-tool
|
2dee346460116ad08f4eca4079b095966fd21505
|
[
"MIT"
] | null | null | null |
globals.py
|
JahnChoi/market-analysis-tool
|
2dee346460116ad08f4eca4079b095966fd21505
|
[
"MIT"
] | null | null | null |
IEX_API_VERSION = ''
IEX_TOKEN = ''
SANDBOX_MODE = False
| 16.5
| 23
| 0.621212
|
7e6afb8a32b15ce36a3a41c1528bc4e915e9deca
| 15,832
|
py
|
Python
|
mmmvi/lib/reporting.py
|
dorbarker/voc-identify
|
f98ec3b2f1c6ce687953cca9cb9ccdaea20ac3a5
|
[
"MIT"
] | 9
|
2021-04-05T22:44:37.000Z
|
2022-02-22T21:56:35.000Z
|
mmmvi/lib/reporting.py
|
melohar/voc-identify
|
f98ec3b2f1c6ce687953cca9cb9ccdaea20ac3a5
|
[
"MIT"
] | 22
|
2021-03-23T00:30:20.000Z
|
2021-09-10T16:50:13.000Z
|
mmmvi/lib/reporting.py
|
melohar/voc-identify
|
f98ec3b2f1c6ce687953cca9cb9ccdaea20ac3a5
|
[
"MIT"
] | 3
|
2021-06-15T23:25:26.000Z
|
2021-09-09T17:49:54.000Z
|
import itertools
import logging
import statistics
from collections import Counter
from pathlib import Path
from typing import Dict, Tuple
import pandas as pd
from mmmvi.lib.types import VoCs, VoCResults, Reads
def one_index_range(position_mutation):
position_range, mutation = position_mutation
if None in position_range:
result = [position_range[0] + 1]
else:
result = [pos + 1 for pos in position_range]
return result
def one_index_results(voc_results: VoCResults) -> VoCResults:
oir = (
pd.DataFrame(voc_results)
.applymap(lambda cell: [one_index_range(group) for group in cell])
.to_dict()
)
return oir
def expand_dataframe_by_reads(df: pd.DataFrame, reads: Reads):
def _expand_rows():
for seq, row in df.iterrows():
for read_name in reads[seq]["reads"]:
yield row._set_name(read_name, inplace=False)
return pd.DataFrame(_expand_rows())
def format_read_report(voc_results: VoCResults, reads: Reads) -> pd.DataFrame:
read_report = pd.DataFrame(one_index_results(voc_results))
has_any_results = read_report.applymap(len).apply(sum, axis="columns") > 0
reads_with_results = read_report[has_any_results]
expanded = expand_dataframe_by_reads(reads_with_results, reads)
expanded["first_pos"] = expanded.apply(
lambda row: sorted(itertools.chain.from_iterable(row))[0], axis=1
)
expanded_sorted = expanded.sort_values(by="first_pos", ascending=True).drop(
"first_pos", axis=1
)
return expanded_sorted
def format_summary(voc_results: VoCResults, vocs: VoCs, reads: Reads) -> pd.DataFrame:
mutation_df = expand_dataframe_by_reads(pd.DataFrame(voc_results), reads)
count_of_reads_with_n_snps = mutation_df.applymap(len).agg(Counter)
summary = (
pd.DataFrame(count_of_reads_with_n_snps.to_dict(),)
.transpose()
.fillna(0)
.applymap(int)
)
summary = summary.reindex(sorted(summary.columns), axis=1)
max_coverage = theoretical_maximum(reads, vocs)
signature_counts = mutation_coverage(voc_results, vocs)
return summary.join(max_coverage).join(signature_counts)
def shannon_entropy():
pass
def mutation_coverage(voc_results, vocs) -> pd.DataFrame:
    # For each variant, determines how many of its signature mutations
    # are represented in the sample.
    #
    # The exclusive signature mutations are those mutations
    # which are not shared with any other variant.
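    #
    # Illustrative (hypothetical) numbers: if a variant has 10 signature
    # mutations, 7 of them exclusive to it, and the reads contain 6 of the
    # 10 (2 of them among the exclusive 7), the report shows "6/10" for
    # complete_signature_mutations and "2/7" for exclusive_signature_mutations.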
nonexclusive = {}
exclusive = {}
report = {}
for voc, mutation_results in voc_results.items():
signatures = set(vocs[voc])
present_mutations = set(
itertools.chain.from_iterable(mutation_results.values())
)
nonexclusive[voc] = {
"maximum": len(signatures),
"present": present_mutations,
"signatures": signatures,
}
for voc, data in nonexclusive.items():
other_sigs = set()
for v in nonexclusive:
if v not in {voc, "reference"}:
other_sigs.update(nonexclusive[v]["signatures"])
exclusive_sigs = data["signatures"].difference(other_sigs)
# janky - fix
exclusive_present = set(p for (p, m) in data["present"]).intersection(
exclusive_sigs
)
exclusive[voc] = {
"maximum": len(exclusive_sigs),
"present": exclusive_present,
}
for voc in nonexclusive.keys():
ne_num = len(nonexclusive[voc]["present"])
ne_denom = nonexclusive[voc]["maximum"]
e_num = len(exclusive[voc]["present"])
e_denom = exclusive[voc]["maximum"]
report[voc] = {
"complete_signature_mutations": f"{ne_num}/{ne_denom}",
"exclusive_signature_mutations": f"{e_num}/{e_denom}",
}
return pd.DataFrame.from_dict(report, orient="index")
def theoretical_maximum(reads: Reads, vocs: VoCs) -> pd.DataFrame:
# Finds the maximum number of mutations for a given variant
# that will fit on the median length read.
#
# The result depends both on read length and on the particular
# genomic positions of the mutations for each variant
all_lengths = []
for seq, read_data in reads.items():
seq_length = read_data["read_obj"].query_alignment_length
for _ in read_data["reads"]:
all_lengths.append(seq_length)
median_read_length = statistics.median(all_lengths)
voc_max = {}
for voc in vocs:
position_ranges = sorted(vocs[voc].keys(), key=lambda x: x[0])
max_covered = 0
for position_range in position_ranges:
start = position_range[0]
end = start + median_read_length
n_covered = sum([p[0] >= start and p[-1] <= end for p in position_ranges])
max_covered = max(n_covered, max_covered)
voc_max[voc] = {"median_maximum_coverage": max_covered}
return pd.DataFrame.from_dict(voc_max, orient="index")
def format_mutation_string(position_range, mutation, wt):
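    # Produces human-readable mutation labels using 1-based coordinates.
    # Illustrative (hypothetical) values: a substitution of A -> T at 0-based
    # position 99 becomes "A100T", a deletion spanning 0-based positions
    # 49-51 becomes "[50-52]del", and an insertion of "ACG" anchored at
    # 0-based position 99 becomes "100ACG".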
start = position_range[0] + 1 # adjust to 1-based counting for reporting
# insertion
if None in position_range:
insertion_nt = "".join(mutation)
s = f"{start}{insertion_nt}"
# deletion
elif None in mutation:
stop = position_range[-1] + 1
# point deletion
if start == stop:
s = f"[{start}]del"
# multi-base deletion
else:
s = f"[{start}-{stop}]del"
# substitution
else:
wildtype = wt[position_range][0]
wildtype_nt = "".join(wildtype)
variant_nt = "".join(mutation)
s = f"{wildtype_nt}{start}{variant_nt}"
return s
def initialize_matrix(voc, wt):
lookup = {}
mutation_strings = []
for position_range, mutations in voc.items():
lookup[position_range] = {}
for mutation in mutations:
mutation_string = format_mutation_string(position_range, mutation, wt)
lookup[position_range][mutation] = mutation_string
mutation_strings.append(mutation_string)
mx = pd.DataFrame(data=0, index=mutation_strings, columns=mutation_strings)
return lookup, mx
def format_cooccurrence_matrix(mutation_result, voc, wt, reads: Reads):
# For one VoC at a time
lookup, mx = initialize_matrix(voc, wt)
for seq, read_mutations in mutation_result.items():
n_reads = len(reads[seq]["reads"])
for position, mutation in read_mutations:
name = lookup[position][mutation]
mx.loc[name, name] += n_reads
for (row_pos, row_mut), (col_pos, col_mut) in itertools.permutations(
read_mutations, r=2
):
row_name = lookup[row_pos][row_mut]
col_name = lookup[col_pos][col_mut]
mx.loc[row_name, col_name] += n_reads
return mx
def format_relative_cooccurrence_matrix(
cooccurrence_matrix: pd.DataFrame,
) -> pd.DataFrame:
rows = []
for denominator_name in cooccurrence_matrix.columns:
denominator = cooccurrence_matrix.loc[denominator_name, denominator_name]
for numerator_name in cooccurrence_matrix.index:
numerator = cooccurrence_matrix.loc[numerator_name, denominator_name]
try:
quotient = int(numerator) / int(denominator)
except ZeroDivisionError:
quotient = 0.0
rows.append(
{
"denominator": denominator_name,
"numerator": numerator_name,
"ratio": quotient,
}
)
return pd.DataFrame(rows)
def format_relative_cooccurrence_matrices(absolute_cooccurrence_matrices):
return {
v: format_relative_cooccurrence_matrix(mx)
for v, mx in absolute_cooccurrence_matrices.items()
}
def format_cooccurrence_matrices(voc_results: VoCResults, vocs: VoCs, reads: Reads):
*variants, wt = sorted(vocs.keys(), key=lambda x: x == "reference")
return {
v: format_cooccurrence_matrix(voc_results[v], vocs[v], vocs[wt], reads)
for v in variants
}
def format_read_species(voc_results, vocs, reads):
species = {}
total_reads = len(voc_results["reference"].keys())
# get non-redundant set of positions across VOCs
for key, species_data in nonredundant_read_species(voc_results, reads):
positions_mutations = species_data["positions_mutations"]
species_positions, species_mutations = format_positions_mutations(
positions_mutations
)
species[key] = {
"positions": species_positions,
"nucleotides": species_mutations,
"count": species_data["count"],
}
bitarrays = make_voc_bitarray(positions_mutations, vocs)
species[key].update(bitarrays)
read_species = pd.DataFrame.from_dict(species, orient="index")
read_species["proportion_total"] = read_species["count"] / total_reads
overlapping_counts = read_species_overlap(read_species["positions"], reads)
read_species["reads_overlapping"] = [
overlapping_counts[positions] for positions in read_species["positions"]
]
read_species["proportion_overlapping"] = (
read_species["count"] / read_species["reads_overlapping"]
)
return read_species.sort_values(by="count", ascending=False)
def nonredundant_read_species(voc_results, reads: Reads):
# old -> {Voc: {name: mutations}}
# new -> {Voc: {seq: mutations}}
nonredundant_reads = set() # full of sequences
for read_results in voc_results.values():
for seq in read_results.keys():
nonredundant_reads.add(seq)
nonredundant = {}
for seq in nonredundant_reads:
positions_mutations = set()
for voc in voc_results:
positions_mutations.update(voc_results[voc][seq])
positions_mutations = sorted(positions_mutations)
if not positions_mutations:
continue
key = str(positions_mutations)
read_count = len(reads[seq]["reads"])
try:
nonredundant[key]["count"] += read_count
except KeyError:
nonredundant[key] = {
"positions_mutations": positions_mutations,
"count": read_count,
}
yield from nonredundant.items()
def format_positions_mutations(positions_mutations):
species_positions = []
species_mutations = []
for p, m in positions_mutations:
# insertion
if None in p:
species_positions.append((p[0],))
species_mutations.append(tuple("del" if x is None else x for x in m))
# deletion
elif None in m:
species_positions.append(p)
species_mutations.append(tuple("del" for _ in m))
# substitution
else:
species_positions.append(p)
species_mutations.append(m)
species_positions = tuple(
tuple(p + 1 for p in group) for group in species_positions
)
species_mutations = tuple(species_mutations)
return species_positions, species_mutations
def make_voc_bitarray(positions_mutations, vocs: VoCs) -> Dict[str, Tuple[int, ...]]:
# Returns a dictionary with a value of a bit array for each VOC key
#
    # The bit array shows for each position in the read species whether it belongs (1) or not (0)
# to each VOC or the reference.
#
# Consider a read species which covers a potential of 5 mutations which are diagnostic
# for VOCs. In this case, the first position happens to be wild type (reference),
# but the following 4 positions are diagnostic of the "A" variant. However, the "B" variant
# shares the mutations at the 4th and 5th positions with "A". The results would be:
#
# {"reference": (1, 0, 0, 0, 0), "A": (0, 1, 1, 1, 1,), "B": (0, 0, 0, 1, 1)}
bitarrays = {}
for (position, nts), voc in itertools.product(positions_mutations, vocs):
match = int(position in vocs[voc] and nts in vocs[voc][position])
try:
bitarrays[voc].append(match)
except KeyError:
bitarrays[voc] = [match]
return {k: tuple(v) for k, v in bitarrays.items()}
def read_species_overlap(
positions: pd.Series, reads: Reads
) -> Dict[Tuple[int, ...], int]:
# Calculates the number of reads which overlap a read species.
#
# To be considered overlapping, the read must contain
# all of the positions in the species.
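    #
    # For example (hypothetical coordinates): a read aligned to reference
    # positions 90-200 counts as overlapping a species at positions
    # (100, 150), but not a species at positions (100, 250).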
overlapping_counts = {species: 0 for species in positions}
for seq, read_data in reads.items():
read_start, *_, read_end = sorted(
read_data["read_obj"].get_reference_positions()
)
# convert 0-based to 1-based
read_start += 1
read_end += 1
n_reads = len(read_data["reads"])
for species_positions in overlapping_counts:
sorted_positions = sorted(itertools.chain.from_iterable(species_positions))
try:
start, *_, stop = sorted_positions
is_overlapping = start >= read_start and stop <= read_end
# a species with a single point mutation
except ValueError:
start, *_ = sorted_positions
is_overlapping = read_end >= start >= read_start
overlapping_counts[species_positions] += is_overlapping * n_reads
return overlapping_counts
def write_summary(voc_results, vocs, reads, outdir, delimiter):
logging.info("Formatting summary")
summary = format_summary(voc_results, vocs, reads)
summary.to_csv(outdir / "summary.txt", sep=delimiter)
def write_read_report(voc_results, reads, outdir, delimiter):
logging.info("Formatting read report")
read_report = format_read_report(voc_results, reads)
read_report.to_csv(outdir / "read_report.txt", sep=delimiter)
def write_read_species(voc_results, vocs, reads, outdir, delimiter):
logging.info("Formatting read species report")
read_species = format_read_species(voc_results, vocs, reads)
read_species.to_csv(outdir / "read_species.txt", sep=delimiter, index=False)
def write_cooccurrence_matrix(
variant: str, directory: Path, data: pd.DataFrame, delimiter: str
) -> None:
variant_out_name = variant.replace("/", "_")
p = directory.joinpath(f"{variant_out_name}.txt")
data.to_csv(p, sep=delimiter)
def write_cooccurrence_matrices(voc_results, vocs, reads, outdir, delimiter):
logging.info("Formatting co-occurrence matrices")
absolute = format_cooccurrence_matrices(voc_results, vocs, reads)
relative = format_relative_cooccurrence_matrices(absolute)
matrices_path = outdir.joinpath("cooccurrence_matrices")
absolute_matrices = matrices_path.joinpath("absolute")
absolute_matrices.mkdir(parents=True, exist_ok=True)
relative_matrices = matrices_path.joinpath("relative")
relative_matrices.mkdir(parents=True, exist_ok=True)
for variant, data in absolute.items():
write_cooccurrence_matrix(variant, absolute_matrices, data, delimiter)
for variant, data in relative.items():
write_cooccurrence_matrix(variant, relative_matrices, data, delimiter)
def write_reports(voc_results, vocs, reads, outdir: Path, delimiter: str):
logging.info("Formatting and writing reports")
outdir.mkdir(exist_ok=True, parents=True)
write_summary(voc_results, vocs, reads, outdir, delimiter)
write_read_report(voc_results, reads, outdir, delimiter)
write_read_species(voc_results, vocs, reads, outdir, delimiter)
write_cooccurrence_matrices(voc_results, vocs, reads, outdir, delimiter)
| 28.890511
| 95
| 0.657782
|
1cafe29e10f0422e18cacc6a07774046e42ed355
| 27,079
|
py
|
Python
|
src/tests/test_dc_models.py
|
BioSystemsUM/DeepMol
|
62904fac46f62ec6231543891efbe52ac7ea1cf1
|
[
"BSD-2-Clause"
] | 9
|
2021-07-10T18:36:04.000Z
|
2022-03-07T12:40:54.000Z
|
src/tests/test_dc_models.py
|
BioSystemsUM/DeepMol
|
62904fac46f62ec6231543891efbe52ac7ea1cf1
|
[
"BSD-2-Clause"
] | null | null | null |
src/tests/test_dc_models.py
|
BioSystemsUM/DeepMol
|
62904fac46f62ec6231543891efbe52ac7ea1cf1
|
[
"BSD-2-Clause"
] | null | null | null |
'''author: Bruno Pereira
date: 28/04/2021
'''
from compoundFeaturization.rdkitFingerprints import MorganFingerprint, MACCSkeysFingerprint, LayeredFingerprint
# from compoundFeaturization.rdkitFingerprints import RDKFingerprint, AtomPairFingerprint
from compoundFeaturization.deepChemFeaturizers import ConvMolFeat, WeaveFeat, CoulombFeat, SmileImageFeat, \
SmilesSeqFeat, MolGraphConvFeat
# from compoundFeaturization.mol2vec import Mol2Vec
# from Datasets.Datasets import NumpyDataset
from loaders.Loaders import CSVLoader
from featureSelection.baseFeatureSelector import LowVarianceFS, KbestFS, PercentilFS, RFECVFS, SelectFromModelFS
from splitters.splitters import RandomSplitter, SingletaskStratifiedSplitter
# from models.sklearnModels import SklearnModel
from models.DeepChemModels import DeepChemModel
from metrics.Metrics import Metric
from metrics.metricsFunctions import roc_auc_score, precision_score, accuracy_score
from parameterOptimization.HyperparameterOpt import HyperparamOpt_Valid
# import preprocessing as preproc
from utils import utils as preproc
# from imbalanced_learn.ImbalancedLearn import RandomOverSampler
# from deepchem.feat import WeaveFeaturizer, CoulombMatrix
# from deepchem.utils.conformers import ConformerGenerator
# from deepchem.trans import IRVTransformer
import numpy as np
# from rdkit import Chem
# ds = MorganFingerprint().featurize(ds)
# ds = MACCSkeysFingerprint().featurize(ds)
# ds = LayeredFingerprint().featurize(ds)
# ds = RDKFingerprint().featurize(ds)
# ds = AtomPairFingerprint().featurize(ds)
# ds = Mol2Vec().featurize(ds)
print('-----------------------------------------------------')
# ds.get_shape()
# ds = LowVarianceFS(0.15).featureSelection(ds)
# ds = KbestFS().featureSelection(ds)
# ds = PercentilFS().featureSelection(ds)
# ds = RFECVFS().featureSelection(ds)
# ds = SelectFromModelFS().featureSelection(ds)
# train_dataset = RandomOverSampler().sample(train_dataset)
# k_folds = splitter.k_fold_split(ds, 3)
# for a, b in k_folds:
# print(a.get_shape())
# print(b.get_shape())
# print('############')
# print(train_dataset.X)
# print(train_dataset.y)
# print(train_dataset.ids)
# print(train_dataset.features)
# print(train_dataset.features2keep)
# def rf_model_builder(n_estimators, max_features, class_weight, model_dir=None):
# rf_model = RandomForestClassifier(n_estimators=n_estimators, max_features=max_features, class_weight=class_weight)
# return SklearnModel(rf_model, model_dir)
#
# params_dict_rf = {"n_estimators": [10, 100],
# "max_features": ["auto", "sqrt", "log2", None],
# "class_weight": [{0: 1., 1: 1.}, {0: 1., 1: 5}, {0: 1., 1: 10}]
# }
#
# def svm_model_builder(C, gamma, kernel, model_dir=None):
# svm_model = SVC(C=C, gamma=gamma, kernel=kernel)
# return SklearnModel(svm_model, model_dir)
#
# params_dict_svm = {'C': [1.0, 0.7, 0.5, 0.3, 0.1],
# 'gamma': ["scale", "auto"],
# 'kernel': ["linear", "rbf"]
# }
#
# optimizer = GridHyperparamOpt(rf_model_builder)
#
# best_rf, best_hyperparams, all_results = optimizer.hyperparam_search(params_dict_rf, train_dataset, valid_dataset, Metric(roc_auc_score))
#
# print('#################')
# print(best_hyperparams)
# print(best_rf)
#
# #print(best_rf.predict(test_dataset))
# print('@@@@@@@@@@@@@@@@')
# print(best_rf.evaluate(test_dataset, metrics))
#
# print(best_rf.predict(test_dataset))
def multitaskclass(dataset):
from deepchem.models import MultitaskClassifier
ds = MorganFingerprint().featurize(dataset)
ds = LowVarianceFS(0.15).featureSelection(ds)
splitter = SingletaskStratifiedSplitter()
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(dataset=ds, frac_train=0.6,
frac_valid=0.2, frac_test=0.2)
multitask = MultitaskClassifier(n_tasks=1, n_features=np.shape(train_dataset.X)[1], layer_sizes=[1000])
model_multi = DeepChemModel(multitask)
# Model training
model_multi.fit(train_dataset)
valid_preds = model_multi.predict(valid_dataset)
test_preds = model_multi.predict(test_dataset)
# Evaluation
metrics = [Metric(roc_auc_score), Metric(precision_score), Metric(accuracy_score)]
print('Training Dataset: ')
train_score = model_multi.evaluate(train_dataset, metrics)
print('Valid Dataset: ')
valid_score = model_multi.evaluate(valid_dataset, metrics)
print('Test Dataset: ')
test_score = model_multi.evaluate(test_dataset, metrics)
return
def graphconvmodel(dataset):
from deepchem.models import GraphConvModel
ds = ConvMolFeat().featurize(dataset)
splitter = SingletaskStratifiedSplitter()
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(dataset=ds, frac_train=0.6,
frac_valid=0.2, frac_test=0.2)
graph = GraphConvModel(n_tasks=1, mode='classification')
model_graph = DeepChemModel(graph)
# Model training
model_graph.fit(train_dataset)
valid_preds = model_graph.predict(valid_dataset)
test_preds = model_graph.predict(test_dataset)
# Evaluation
metrics = [Metric(roc_auc_score), Metric(precision_score), Metric(accuracy_score)]
print('Training Dataset: ')
train_score = model_graph.evaluate(train_dataset, metrics)
print('Valid Dataset: ')
valid_score = model_graph.evaluate(valid_dataset, metrics)
print('Test Dataset: ')
test_score = model_graph.evaluate(test_dataset, metrics)
return
def mpnnmodel(dataset):
from deepchem.models import MPNNModel
ds = WeaveFeat().featurize(dataset)
splitter = SingletaskStratifiedSplitter()
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(dataset=ds, frac_train=0.6,
frac_valid=0.2, frac_test=0.2)
mpnn = MPNNModel(n_tasks=1, n_pair_feat=14, n_atom_feat=75, n_hidden=75, T=1, M=1, mode='classification')
model_mpnn = DeepChemModel(mpnn)
# Model training
model_mpnn.fit(train_dataset)
valid_preds = model_mpnn.predict(valid_dataset)
test_preds = model_mpnn.predict(test_dataset)
# Evaluation
metrics = [Metric(roc_auc_score), Metric(precision_score), Metric(accuracy_score)]
print('Training Dataset: ')
train_score = model_mpnn.evaluate(train_dataset, metrics)
print('Valid Dataset: ')
valid_score = model_mpnn.evaluate(valid_dataset, metrics)
print('Test Dataset: ')
test_score = model_mpnn.evaluate(test_dataset, metrics)
return
def weavemodel(dataset):
from deepchem.models import WeaveModel
ds = WeaveFeat().featurize(dataset)
splitter = SingletaskStratifiedSplitter()
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(dataset=ds, frac_train=0.6,
frac_valid=0.2, frac_test=0.2)
weave = WeaveModel(n_tasks=1, mode='classification')
model_weave = DeepChemModel(weave)
# Model training
model_weave.fit(train_dataset)
valid_preds = model_weave.predict(valid_dataset)
test_preds = model_weave.predict(test_dataset)
# Evaluation
metrics = [Metric(roc_auc_score), Metric(precision_score), Metric(accuracy_score)]
print('Training Dataset: ')
train_score = model_weave.evaluate(train_dataset, metrics)
print('Valid Dataset: ')
valid_score = model_weave.evaluate(valid_dataset, metrics)
print('Test Dataset: ')
test_score = model_weave.evaluate(test_dataset, metrics)
return
def chemcepmodel(dataset):
from deepchem.models import ChemCeption
ds = SmileImageFeat().featurize(dataset)
splitter = SingletaskStratifiedSplitter()
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(dataset=ds, frac_train=0.6,
frac_valid=0.2, frac_test=0.2)
    chem = ChemCeption(n_tasks=1, mode='classification')
model_chem = DeepChemModel(chem)
# Model training
model_chem.fit(train_dataset)
valid_preds = model_chem.predict(valid_dataset)
test_preds = model_chem.predict(test_dataset)
# Evaluation
metrics = [Metric(roc_auc_score), Metric(precision_score), Metric(accuracy_score)]
print('Training Dataset: ')
train_score = model_chem.evaluate(train_dataset, metrics)
print('Valid Dataset: ')
valid_score = model_chem.evaluate(valid_dataset, metrics)
print('Test Dataset: ')
test_score = model_chem.evaluate(test_dataset, metrics)
return
def cnnmodel(dataset):
from deepchem.models import CNN
ds = SmileImageFeat().featurize(dataset)
splitter = SingletaskStratifiedSplitter()
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(dataset=ds, frac_train=0.6,
frac_valid=0.2, frac_test=0.2)
    cnn = CNN(n_tasks=1, n_features=np.shape(ds.X)[1], dims=1)
model_cnn = DeepChemModel(cnn)
# Model training
model_cnn.fit(train_dataset)
valid_preds = model_cnn.predict(valid_dataset)
test_preds = model_cnn.predict(test_dataset)
# Evaluation
metrics = [Metric(roc_auc_score), Metric(precision_score), Metric(accuracy_score)]
print('Training Dataset: ')
train_score = model_cnn.evaluate(train_dataset, metrics)
print('Valid Dataset: ')
valid_score = model_cnn.evaluate(valid_dataset, metrics)
print('Test Dataset: ')
test_score = model_cnn.evaluate(test_dataset, metrics)
return
def smilesvec(dataset):
from deepchem.models import Smiles2Vec
    ds = SmilesSeqFeat().featurize(dataset)
splitter = SingletaskStratifiedSplitter()
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(dataset=ds, frac_train=0.6,
frac_valid=0.2, frac_test=0.2)
vec = Smiles2Vec(ds.dictionary, n_tasks=1, mode='classification')
model_vec = DeepChemModel(vec)
# Model training
model_vec.fit(train_dataset)
valid_preds = model_vec.predict(valid_dataset)
test_preds = model_vec.predict(test_dataset)
# Evaluation
metrics = [Metric(roc_auc_score), Metric(precision_score), Metric(accuracy_score)]
print('Training Dataset: ')
train_score = model_vec.evaluate(train_dataset, metrics)
print('Valid Dataset: ')
valid_score = model_vec.evaluate(valid_dataset, metrics)
print('Test Dataset: ')
test_score = model_vec.evaluate(test_dataset, metrics)
return
def irvmodel(dataset):
from deepchem.models import MultitaskIRVClassifier
ds = MorganFingerprint().featurize(dataset)
ds = preproc.irv_transformation(ds, K=10, n_tasks=1)
splitter = SingletaskStratifiedSplitter()
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(dataset=ds, frac_train=0.6,
frac_valid=0.2, frac_test=0.2)
irv = MultitaskIRVClassifier(n_tasks=1, mode='classification')
model_irv = DeepChemModel(irv)
# Model training
model_irv.fit(train_dataset)
valid_preds = model_irv.predict(valid_dataset)
test_preds = model_irv.predict(test_dataset)
# Evaluation
metrics = [Metric(roc_auc_score), Metric(precision_score), Metric(accuracy_score)]
print('Training Dataset: ')
train_score = model_irv.evaluate(train_dataset, metrics)
print('Valid Dataset: ')
valid_score = model_irv.evaluate(valid_dataset, metrics)
print('Test Dataset: ')
test_score = model_irv.evaluate(test_dataset, metrics)
return
def gatmodel(dataset):
from deepchem.models import GATModel
ds = MolGraphConvFeat().featurize(dataset)
splitter = SingletaskStratifiedSplitter()
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(dataset=ds, frac_train=0.6,
frac_valid=0.2, frac_test=0.2)
gat = GATModel(n_tasks=1, mode='classification')
model_gat = DeepChemModel(gat)
# Model training
model_gat.fit(train_dataset)
valid_preds = model_gat.predict(valid_dataset)
test_preds = model_gat.predict(test_dataset)
# Evaluation
metrics = [Metric(roc_auc_score), Metric(precision_score), Metric(accuracy_score)]
print('Training Dataset: ')
train_score = model_gat.evaluate(train_dataset, metrics)
print('Valid Dataset: ')
valid_score = model_gat.evaluate(valid_dataset, metrics)
print('Test Dataset: ')
test_score = model_gat.evaluate(test_dataset, metrics)
return
def gcnmodel(dataset):
from deepchem.models import GCNModel
ds = MolGraphConvFeat().featurize(dataset)
splitter = SingletaskStratifiedSplitter()
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(dataset=ds, frac_train=0.6,
frac_valid=0.2, frac_test=0.2)
    gcn = GCNModel(n_tasks=1, mode='classification')
model_gcn = DeepChemModel(gcn)
# Model training
model_gcn.fit(train_dataset)
valid_preds = model_gcn.predict(valid_dataset)
test_preds = model_gcn.predict(test_dataset)
# Evaluation
metrics = [Metric(roc_auc_score), Metric(precision_score), Metric(accuracy_score)]
print('Training Dataset: ')
train_score = model_gcn.evaluate(train_dataset, metrics)
print('Valid Dataset: ')
valid_score = model_gcn.evaluate(valid_dataset, metrics)
print('Test Dataset: ')
test_score = model_gcn.evaluate(test_dataset, metrics)
return
def attmodel(dataset):
from deepchem.models import AttentiveFPModel
ds = MolGraphConvFeat(use_edges=True).featurize(dataset)
splitter = SingletaskStratifiedSplitter()
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(dataset=ds, frac_train=0.6,
frac_valid=0.2, frac_test=0.2)
att = AttentiveFPModel(n_tasks=1, mode='classification')
model_att = DeepChemModel(att)
# Model training
model_att.fit(train_dataset)
valid_preds = model_att.predict(valid_dataset)
test_preds = model_att.predict(test_dataset)
# Evaluation
metrics = [Metric(roc_auc_score), Metric(precision_score), Metric(accuracy_score)]
print('Training Dataset: ')
train_score = model_att.evaluate(train_dataset, metrics)
print('Valid Dataset: ')
valid_score = model_att.evaluate(valid_dataset, metrics)
print('Test Dataset: ')
test_score = model_att.evaluate(test_dataset, metrics)
return
def dagmodel(dataset):
from deepchem.models import DAGModel
ds = ConvMolFeat().featurize(dataset)
ds = preproc.dag_transformation(ds, max_atoms=150)
splitter = SingletaskStratifiedSplitter()
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(dataset=ds, frac_train=0.6,
frac_valid=0.2, frac_test=0.2)
dag = DAGModel(n_tasks=1, max_atoms=150, mode='classification')
model_dag = DeepChemModel(dag)
# Model training
model_dag.fit(train_dataset)
valid_preds = model_dag.predict(valid_dataset)
test_preds = model_dag.predict(test_dataset)
# Evaluation
metrics = [Metric(roc_auc_score), Metric(precision_score), Metric(accuracy_score)]
print('Training Dataset: ')
train_score = model_dag.evaluate(train_dataset, metrics)
print('Valid Dataset: ')
valid_score = model_dag.evaluate(valid_dataset, metrics)
print('Test Dataset: ')
test_score = model_dag.evaluate(test_dataset, metrics)
return
def graphconvbuilder(graph_conv_layers, dense_layer_size, dropout, model_dir=None):
from deepchem.models import GraphConvModel
graph = GraphConvModel(n_tasks=1,
graph_conv_layers=graph_conv_layers,
dense_layer_size=dense_layer_size,
dropout=dropout)
return DeepChemModel(graph)
def hyperoptimgraph(dataset):
ds = ConvMolFeat().featurize(dataset)
splitter = SingletaskStratifiedSplitter()
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(dataset=ds, frac_train=0.6,
frac_valid=0.2, frac_test=0.2)
params = {'graph_conv_layers': [[64, 64], [72, 72], [84, 84]],
'dense_layer_size': [128, 144, 198],
'dropout': [0.0, 0.25, 0.5]}
optimizer = HyperparamOpt_Valid(graphconvbuilder)
best_rf, best_hyperparams, all_results = optimizer.hyperparam_search(params, train_dataset, valid_dataset,
Metric(roc_auc_score))
metrics = [Metric(roc_auc_score), Metric(precision_score), Metric(accuracy_score)]
print('Best Model: ')
print(best_rf.evaluate(test_dataset, metrics))
return
def mpnnbuilder(n_atom_feat, n_pair_feat, n_hidden, T, M, dropout, model_dir=None):
from deepchem.models import MPNNModel
mpnn = MPNNModel(n_tasks=1,
n_atom_feat=n_atom_feat,
n_pair_feat=n_pair_feat,
n_hidden=n_hidden,
T=T,
M=M,
dropout=dropout,
mode='classification')
return DeepChemModel(mpnn)
def hyperoptimmpnn(dataset):
ds = WeaveFeat().featurize(dataset)
splitter = SingletaskStratifiedSplitter()
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(dataset=ds, frac_train=0.6,
frac_valid=0.2, frac_test=0.2)
params = {'n_atom_feat': [45],
'n_pair_feat': [14],
'n_hidden': [50, 75, 100],
'T': [1, 10],
'M': [1, 10],
'dropout': [0.0, 0.25, 0.5]}
    optimizer = HyperparamOpt_Valid(mpnnbuilder)
best_rf, best_hyperparams, all_results = optimizer.hyperparam_search(params, train_dataset, valid_dataset,
Metric(roc_auc_score))
metrics = [Metric(roc_auc_score), Metric(precision_score), Metric(accuracy_score)]
print('Best Model: ')
print(best_rf.evaluate(test_dataset, metrics))
return
def gatbuilder(n_attention_heads, dropout, alpha, predictor_hidden_feats, predictor_dropout, number_atom_features,
model_dir=None):
from deepchem.models import GATModel
gat = GATModel(n_tasks=1,
n_attention_heads=n_attention_heads,
dropout=dropout,
alpha=alpha,
predictor_hidden_feats=predictor_hidden_feats,
predictor_dropout=predictor_dropout,
number_atom_features=number_atom_features,
mode='classification')
return DeepChemModel(gat)
def hyperoptimgat(dataset):
ds = MolGraphConvFeat().featurize(dataset)
splitter = SingletaskStratifiedSplitter()
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(dataset=ds, frac_train=0.6,
frac_valid=0.2, frac_test=0.2)
params = {'n_attention_heads': [8, 16],
'dropout': [0.0, 0.25, 0.5],
'alpha': [0.2, 0.4],
'predictor_hidden_feats': [128, 256],
'predictor_dropout': [0.0, 0.25],
'number_atom_features': [30, 45]}
optimizer = HyperparamOpt_Valid(gatbuilder)
best_rf, best_hyperparams, all_results = optimizer.hyperparam_search(params, train_dataset, valid_dataset,
Metric(roc_auc_score))
metrics = [Metric(roc_auc_score), Metric(precision_score), Metric(accuracy_score)]
print('Best Model: ')
print(best_rf.evaluate(test_dataset, metrics))
return
def gcnbuilder(graph_conv_layers, dropout, predictor_hidden_feats, predictor_dropout, number_atom_features,
learning_rate, model_dir=None):
from deepchem.models import GCNModel
gcn = GCNModel(n_tasks=1,
graph_conv_layers=graph_conv_layers,
dropout=dropout,
predictor_hidden_feats=predictor_hidden_feats,
predictor_dropout=predictor_dropout,
number_atom_features=number_atom_features,
learning_rate=learning_rate,
mode='classification')
return DeepChemModel(gcn)
def hyperoptimgcn(dataset):
ds = MolGraphConvFeat().featurize(dataset)
splitter = SingletaskStratifiedSplitter()
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(dataset=ds, frac_train=0.6,
frac_valid=0.2, frac_test=0.2)
params = {'graph_conv_layers': [[64, 64], [72, 72], [84, 84]],
'dropout': [0.0, 0.25, 0.50],
              'predictor_hidden_feats': [128, 256],
'predictor_dropout': [0.0, 0.25],
'number_atom_features': [30, 45],
'learning_rate': [0.001, 0.01]}
optimizer = HyperparamOpt_Valid(gcnbuilder)
best_rf, best_hyperparams, all_results = optimizer.hyperparam_search(params, train_dataset, valid_dataset,
Metric(roc_auc_score))
metrics = [Metric(roc_auc_score), Metric(precision_score), Metric(accuracy_score)]
print('Best Model: ')
print(best_rf.evaluate(test_dataset, metrics))
return
def cnnbuilder(n_features, layer_filters, kernel_size, weight_init_stddevs, bias_init_consts, weight_decay_penalty,
dropouts, model_dir=None):
from deepchem.models import CNN
    cnn = CNN(n_tasks=1,
              n_features=n_features,
              layer_filters=layer_filters,
              kernel_size=kernel_size,
              weight_init_stddevs=weight_init_stddevs,
              bias_init_consts=bias_init_consts,
              weight_decay_penalty=weight_decay_penalty,
              dropouts=dropouts)
return DeepChemModel(cnn)
def hyperoptimcnn(dataset):
ds = SmileImageFeat().featurize(dataset)
splitter = SingletaskStratifiedSplitter()
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(dataset=ds, frac_train=0.6,
frac_valid=0.2, frac_test=0.2)
    params = {'n_features': [np.shape(ds.X)[1]],  # fixed value, listed so the grid search can iterate over it
'layer_filters': [[100], [150], [200]],
'kernel_size': [5, 10],
'weight_init_stddevs': [0.02, 0.04],
'bias_init_consts': [1.0, 2.0],
'weight_decay_penalty': [0.0, 0.25],
'dropouts': [0.25, 0.5, 0.75]}
optimizer = HyperparamOpt_Valid(cnnbuilder)
best_rf, best_hyperparams, all_results = optimizer.hyperparam_search(params, train_dataset, valid_dataset,
Metric(roc_auc_score))
metrics = [Metric(roc_auc_score), Metric(precision_score), Metric(accuracy_score)]
print('Best Model: ')
print(best_rf.evaluate(test_dataset, metrics))
return
def menu():
ds = None
string = '''
This is a file that allows to test multiple DeepChem Models.
1 - Read the dataset (only performed once)
2 - Print shape of dataset
3 - Select a Model (featurization and splitting included)
4 - Hyperparameter optimization
5 - Exit
'''
substring = '''
Models available:
a - MultitaskClassifier
b - GraphConvModel
c - MPNNModel
d - WeaveModel
e - ChemCeption
f - CNN
g - Smiles2Vec
h - MultitaskIRVClassifier
i - GATModel
j - GCNModel
k - AttentiveFPModel
l - DAGModel
m - Return
'''
substring2 = '''
Models available:
a - GraphConvModel
b - MPNNModel
c - GATModel
d - GCNModel
e - CNN
'''
while True:
print(string)
opt = int(input('Option: '))
if opt == 1:
if ds is None:
dataset = CSVLoader(dataset_path='../data/preprocessed_dataset_wfoodb.csv',
mols_field='Smiles',
labels_fields='Class',
id_field='ID') # , shard_size=4000)
ds = dataset.create_dataset()
print('Dataset established')
else:
print('Dataset already read')
elif opt == 2:
if ds is None:
print('A dataset has to be read first')
else:
ds.get_shape()
# print('X: ', X)
# print('y: ', y)
# print('features: ', features)
# print('ids: ', ids)
elif opt == 3 and ds is not None:
print(substring)
opt2 = input('Model (letter): ')
if opt2 == 'a':
multitaskclass(ds)
elif opt2 == 'b':
graphconvmodel(ds)
elif opt2 == 'c':
mpnnmodel(ds)
elif opt2 == 'd':
weavemodel(ds)
elif opt2 == 'e':
chemcepmodel(ds)
elif opt2 == 'f':
cnnmodel(ds)
elif opt2 == 'g':
smilesvec(ds)
elif opt2 == 'h':
irvmodel(ds)
elif opt2 == 'i':
gatmodel(ds)
elif opt2 == 'j':
gcnmodel(ds)
elif opt2 == 'k':
attmodel(ds)
elif opt2 == 'l':
dagmodel(ds)
elif opt2 == 'm':
pass
else:
print('Invalid option')
elif opt == 4:
if ds is None:
print('A dataset has to be read first')
else:
print(substring2)
opt3 = input('Model (letter): ')
if opt3 == 'a':
hyperoptimgraph(ds)
elif opt3 == 'b':
hyperoptimmpnn(ds)
elif opt3 == 'c':
hyperoptimgat(ds)
elif opt3 == 'd':
hyperoptimgcn(ds)
elif opt3 == 'e':
hyperoptimcnn(ds)
elif opt == 5:
break
if __name__ == '__main__':
menu()
| 40.296131
| 139
| 0.633738
|
f742cea0cdb90b0bf9f8e8687b86798a116c4b4d
| 1,468
|
py
|
Python
|
sdk/servicefabric/azure-servicefabric/azure/servicefabric/_configuration.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 2,728
|
2015-01-09T10:19:32.000Z
|
2022-03-31T14:50:33.000Z
|
sdk/servicefabric/azure-servicefabric/azure/servicefabric/_configuration.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 17,773
|
2015-01-05T15:57:17.000Z
|
2022-03-31T23:50:25.000Z
|
sdk/servicefabric/azure-servicefabric/azure/servicefabric/_configuration.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 1,916
|
2015-01-19T05:05:41.000Z
|
2022-03-31T19:36:44.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest import Configuration
from .version import VERSION
class ServiceFabricClientAPIsConfiguration(Configuration):
"""Configuration for ServiceFabricClientAPIs
Note that all parameters used to create this instance are saved as instance
attributes.
:param credentials: Subscription credentials which uniquely identify
client subscription.
:type credentials: None
:param str base_url: Service URL
"""
def __init__(
self, credentials, base_url=None):
if credentials is None:
raise ValueError("Parameter 'credentials' must not be None.")
if not base_url:
base_url = 'http://localhost:19080'
super(ServiceFabricClientAPIsConfiguration, self).__init__(base_url)
# Starting Autorest.Python 4.0.64, make connection pool activated by default
self.keep_alive = True
self.add_user_agent('azure-servicefabric/{}'.format(VERSION))
self.credentials = credentials
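# Usage sketch (illustrative): any non-None credentials object satisfies the check in
# __init__, and base_url falls back to the local Service Fabric endpoint when omitted.
#
#     creds = object()  # placeholder; a real msrest credentials object would go here
#     config = ServiceFabricClientAPIsConfiguration(creds)
#     assert config.base_url == 'http://localhost:19080'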
| 33.363636
| 84
| 0.645777
|
4a065cfd680c545b5f2bbb15a94d396634d98162
| 798
|
py
|
Python
|
Projects/Online Workouts/w3resource/Basic - Part-I/program-49.py
|
ivenpoker/Python-Projects
|
2975e1bd687ec8dbcc7a4842c13466cb86292679
|
[
"MIT"
] | 1
|
2019-09-23T15:51:45.000Z
|
2019-09-23T15:51:45.000Z
|
Projects/Online Workouts/w3resource/Basic - Part-I/program-49.py
|
ivenpoker/Python-Projects
|
2975e1bd687ec8dbcc7a4842c13466cb86292679
|
[
"MIT"
] | 5
|
2021-02-08T20:47:19.000Z
|
2022-03-12T00:35:44.000Z
|
Projects/Online Workouts/w3resource/Basic - Part-I/program-49.py
|
ivenpoker/Python-Projects
|
2975e1bd687ec8dbcc7a4842c13466cb86292679
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
###################################################################################
# #
# Program purpose: List all files in a directory. #
# Program Author : Happi Yvan <ivensteinpoker@gmail.com> #
# Creation Date : August 9, 2019 #
# #
###################################################################################
if __name__ == "__main__":
from os import listdir
from os.path import isfile, join
files_list = [f for f in listdir("./") if isfile(join("./", f))]
print(files_list)
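# The same listing can be written with pathlib (equivalent sketch):
#
#     from pathlib import Path
#     files_list = [p.name for p in Path(".").iterdir() if p.is_file()]
#     print(files_list)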
| 46.941176
| 83
| 0.295739
|
84bfa0dd7e0fcb5f825d1cf85677835a37c4848b
| 7,767
|
py
|
Python
|
meetings/agenda.py
|
wooriml/proj8-freetimes
|
40bfe3f322aab66c7aae79e96326a79a29c8473c
|
[
"Artistic-2.0"
] | null | null | null |
meetings/agenda.py
|
wooriml/proj8-freetimes
|
40bfe3f322aab66c7aae79e96326a79a29c8473c
|
[
"Artistic-2.0"
] | null | null | null |
meetings/agenda.py
|
wooriml/proj8-freetimes
|
40bfe3f322aab66c7aae79e96326a79a29c8473c
|
[
"Artistic-2.0"
] | null | null | null |
"""
Edited for optimization for CIS322 proj8-freetimes
author: CIS210
Edit: Jenny Lee
no need for datetime importation.
"""
class Appt:
def __init__(self, begin, end):
self._begin = begin
self._end = end
def __repr__(self):
"""returns str representation of self._begin and self._end
alternative way instead of __str__(self)
"""
return (self._begin + " to " + self._end)
def __lt__(self, other):
"""Does this appointment finish before other begins?
Arguments:
other: another Appt
Returns:
True iff this Appt is done by the time other begins.
"""
return self._end <= other._begin
def __gt__(self, other):
"""Does other appointment finish before this begins?
Arguments:
other: another Appt
Returns:
True iff other is done by the time this Appt begins
"""
return other < self
def overlaps(self, other):
"""Is there a non-zero overlap between this appointment
and the other appointment?
Arguments:
other is an Appt
Returns:
True iff there exists some duration (greater than zero)
between this Appt and other.
"""
return not (self < other or other < self)
def intersect(self, other):
"""Return an appointment representing the period in
common between this appointment and another.
Requires self.overlaps(other).
Arguments:
other: Another Appt
Returns:
An appointment representing the time period in common
between self and other. Description of returned Appt
is copied from this (self), unless a non-null string is
provided as desc.
"""
#if desc=="":
# desc = self.desc
assert(self.overlaps(other))
begin_time = max(self._begin, other._begin)
end_time = min(self._end, other._end)
return Appt(begin_time, end_time)
def union(self, other):
"""Return an appointment representing the combined period in
common between this appointment and another.
Requires self.overlaps(other).
Arguments:
other: Another Appt
Returns:
An appointment representing the time period spanning
both self and other. Description of returned Appt
is concatenation of two unless a non-null string is
provided as desc.
"""
#if desc=="":
# desc = self.desc + " " + other.desc
assert(self.overlaps(other))
begin = min(self._begin, other._begin)
        end = max(self._end, other._end)
return Appt(begin, end)
"""
_str_ is alternated with _repr_
def __str__(self):
String representation of appointment.
Example:
2012.10.31 13:00 13:50 | CIS 210 lecture
This format is designed to be easily divided
into parts: Split on '|', then split on whitespace,
then split date on '.' and times on ':'.
daystr = self.begin.date().strftime("%Y.%m.%d ")
begstr = self.begin.strftime("%H:%M ")
endstr = self.end.strftime("%H:%M ")
return daystr + begstr + endstr + "| " + self.desc
"""
class Agenda:
"""An Agenda is essentially a list of appointments,
with some agenda-specific methods.
"""
def __init__(self):
"""An empty agenda."""
self._appts = [ ]
def listf(self):
return self._appts
def append(self,appt):
"""Add an Appt to the agenda."""
        self._appts.append(appt)
def intersect(self,other):
"""Return a new agenda containing appointments
that are overlaps between appointments in this agenda
and appointments in the other agenda.
Arguments:
other: Another Agenda, to be intersected with this one
"""
#default_desc = (desc == "")
result = Agenda()
for thisappt in self._appts:
for otherappt in other._appts:
if thisappt.overlaps(otherappt):
result.append(thisappt.intersect(otherappt))
return result
def normalize(self):
"""Merge overlapping events in an agenda. For example, if
the first appointment is from 1pm to 3pm, and the second is
from 2pm to 4pm, these two are merged into an appt from
1pm to 4pm, with a combination description.
After normalize, the agenda is in order by date and time,
with no overlapping appointments.
"""
        if len(self._appts) == 0:
            return
        ordering = lambda ap: ap._begin
        self._appts.sort(key=ordering)
normalized = [ ]
print("Starting normalization")
cur = self._appts[0]
for appt in self._appts[1:]:
if appt > cur:
# Not overlapping
# print("Gap - emitting ", cur)
normalized.append(cur)
cur = appt
else:
# Overlapping
# print("Merging ", cur, "\n"+
# "with ", appt)
cur = cur.union(appt)
# print("New cur: ", cur)
# print("Last appt: ", cur)
normalized.append(cur)
self._appts = normalized
def normalized(self):
"""
A non-destructive normalize
(like "sorted(l)" vs "l.sort()").
Returns a normalized copy of this agenda.
"""
copy = Agenda()
        copy._appts = list(self._appts)  # copy the list so normalize() does not reorder this agenda
copy.normalize()
return copy
def complement(self, freeblock):
"""Produce the complement of an agenda
within the span of a timeblock represented by
an appointment. For example,
if this agenda is a set of appointments, produce a
new agenda of the times *not* in appointments in
a given time period.
Args:
freeblock: Looking for time blocks in this period
that are not conflicting with appointments in
this agenda.
Returns:
A new agenda containing exactly the times that
are within the period of freeblock and
not within appointments in this agenda. The
description of the resulting appointments comes
from freeblock.desc.
"""
copy = self.normalized()
comp = Agenda()
cur_time = freeblock._begin
for appt in copy._appts:
if appt < freeblock:
continue
if appt > freeblock:
if cur_time < freeblock._end:
comp.append(Appt(cur_time,freeblock._end))
cur_time = freeblock._end
break
if cur_time < appt._begin:
comp.append(Appt(cur_time, appt._begin))
cur_time = max(appt._end,cur_time)
if cur_time < freeblock._end:
comp.append(Appt(cur_time, freeblock._end))
return comp
def appendation(self):
"""append dictionary by iteration with the readable object
"""
res = []
for appt in self._appts:
res.append({"startingtime": appt._begin.format("MM/DD.YYYY HH:mm"),
"endingtime": appt._end.format("MM/DD/YYYY HH:mm")})
return res
def __len__(self):
"""Number of appointments, callable as built-in len() function"""
        return len(self._appts)
def __iter__(self):
"""An iterator through the appointments in this agenda."""
        return self._appts.__iter__()
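# Usage sketch (illustrative; plain comparable strings are used as times here, although
# the surrounding project passes arrow datetimes):
#
#     busy = Agenda()
#     busy.append(Appt("10:00", "11:00"))
#     busy.append(Appt("13:30", "14:30"))
#     free = busy.complement(Appt("09:00", "17:00"))
#     print(free.listf())   # [09:00 to 10:00, 11:00 to 13:30, 14:30 to 17:00]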
| 30.821429
| 79
| 0.567916
|
a4a88d87d2f695b990c1b3cfd1e5abf4e88b008d
| 4,309
|
py
|
Python
|
apps/benchmark/arm_cpu_imagenet_bench.py
|
mwillsey/incubator-tvm
|
e02dc69fef294eb73dd65d18949ed9e108f60cda
|
[
"Apache-2.0"
] | 2
|
2020-04-17T02:25:16.000Z
|
2020-11-25T11:39:43.000Z
|
apps/benchmark/arm_cpu_imagenet_bench.py
|
mwillsey/incubator-tvm
|
e02dc69fef294eb73dd65d18949ed9e108f60cda
|
[
"Apache-2.0"
] | 3
|
2020-04-20T15:37:55.000Z
|
2020-05-13T05:34:28.000Z
|
apps/benchmark/arm_cpu_imagenet_bench.py
|
mwillsey/incubator-tvm
|
e02dc69fef294eb73dd65d18949ed9e108f60cda
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Benchmark script for ImageNet models on ARM CPU.
see README.md for the usage and results of this script.
"""
import argparse
import numpy as np
import tvm
from tvm import te
from tvm.contrib.util import tempdir
import tvm.contrib.graph_runtime as runtime
from tvm import relay
from util import get_network, print_progress
def evaluate_network(network, target, target_host, repeat):
# connect to remote device
tracker = tvm.rpc.connect_tracker(args.host, args.port)
remote = tracker.request(args.rpc_key)
print_progress(network)
net, params, input_shape, output_shape = get_network(network, batch_size=1)
print_progress("%-20s building..." % network)
with tvm.transform.PassContext(opt_level=3):
graph, lib, params = relay.build(net, target=target, target_host=target_host, params=params)
tmp = tempdir()
if "android" in str(target):
from tvm.contrib import ndk
filename = "%s.so" % network
lib.export_library(tmp.relpath(filename), ndk.create_shared)
else:
filename = "%s.tar" % network
lib.export_library(tmp.relpath(filename))
# upload library and params
print_progress("%-20s uploading..." % network)
ctx = remote.context(str(target), 0)
remote.upload(tmp.relpath(filename))
rlib = remote.load_module(filename)
module = runtime.create(graph, rlib, ctx)
data_tvm = tvm.nd.array((np.random.uniform(size=input_shape)).astype(dtype))
module.set_input("data", data_tvm)
module.set_input(**params)
# evaluate
print_progress("%-20s evaluating..." % network)
ftimer = module.module.time_evaluator("run", ctx, number=1, repeat=repeat)
prof_res = np.array(ftimer().results) * 1000 # multiply 1000 for converting to millisecond
print(
"%-20s %-19s (%s)" % (network, "%.2f ms" % np.mean(prof_res), "%.2f ms" % np.std(prof_res))
)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--network",
type=str,
choices=[
"resnet-18",
"resnet-34",
"resnet-50",
"vgg-16",
"vgg-19",
"densenet-121",
"inception_v3",
"mobilenet",
"squeezenet_v1.0",
"squeezenet_v1.1",
],
help="The name of neural network",
)
parser.add_argument(
"--model",
type=str,
choices=["rk3399", "mate10", "mate10pro", "p20", "p20pro", "pixel2", "rasp3b", "pynq"],
default="rk3399",
help="The model of the test device. If your device is not listed in "
"the choices list, pick the most similar one as argument.",
)
parser.add_argument("--host", type=str, default="localhost")
parser.add_argument("--port", type=int, default=9190)
parser.add_argument("--rpc-key", type=str, required=True)
parser.add_argument("--repeat", type=int, default=10)
args = parser.parse_args()
dtype = "float32"
if args.network is None:
networks = ["squeezenet_v1.1", "mobilenet", "resnet-18", "vgg-16"]
else:
networks = [args.network]
target = tvm.target.arm_cpu(model=args.model)
target_host = None
print("--------------------------------------------------")
print("%-20s %-20s" % ("Network Name", "Mean Inference Time (std dev)"))
print("--------------------------------------------------")
for network in networks:
evaluate_network(network, target, target_host, args.repeat)
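# Example invocation (device model and RPC key are placeholders; an RPC tracker must be
# running on --host/--port with the target device registered under --rpc-key):
#
#   python3 arm_cpu_imagenet_bench.py --network mobilenet --model rk3399 --rpc-key rk3399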
| 35.03252
| 100
| 0.645161
|
3805356e4f8d0771b42bbfc8c0e72720ee44f2e8
| 16,019
|
py
|
Python
|
encapsia_cli/plugininfo.py
|
tcorbettclark/encapsia-cli
|
1e2ed8857c2224f6b80478b1ca09b268fbdd85f6
|
[
"MIT"
] | 2
|
2019-06-12T09:49:52.000Z
|
2020-10-23T13:28:27.000Z
|
encapsia_cli/plugininfo.py
|
tcorbettclark/encapsia-cli
|
1e2ed8857c2224f6b80478b1ca09b268fbdd85f6
|
[
"MIT"
] | 49
|
2019-07-02T14:50:43.000Z
|
2022-03-31T12:17:16.000Z
|
encapsia_cli/plugininfo.py
|
tcorbettclark/encapsia-cli
|
1e2ed8857c2224f6b80478b1ca09b268fbdd85f6
|
[
"MIT"
] | 5
|
2019-02-28T08:16:03.000Z
|
2021-06-07T10:08:56.000Z
|
from __future__ import annotations
import collections
import re
import typing as T
from dataclasses import dataclass
from functools import total_ordering
from pathlib import Path
from warnings import warn
import arrow
import semver
from encapsia_cli import lib, s3
ALLOWED_PLUGIN_NAME = "[a-z][a-z0-9_]*"
ALLOWED_VERSION = "[0-9][a-zA-Z0-9.+-]*"
ALLOWED_VARIANT = "[a-zA-Z0-9_]+"
T_VersionDict = T.Union[T.Dict[str, str], T.Dict[str, T.Dict[str, T.Any]]]
T_AnyVariant = T.NewType("T_AnyVariant", object)
T_Variant = T.Union[str, T_AnyVariant]
class TooManyVariantTagsError(Exception):
pass
class InvalidSpecError(Exception):
pass
def _format_datetime(dt):
return arrow.get(dt).strftime("%a %d %b %Y %H:%M:%S")
# In Python 3.7 and beyond we could do the following. But we want to support Python 3.6.
# return datetime.datetime.fromisoformat(dt).strftime("%a %d %b %Y %H:%M:%S")
def get_variant_from_tags(tags):
variant_tags = [t for t in tags if t.startswith("variant=")]
if len(variant_tags) == 0:
variant = None
elif len(variant_tags) == 1:
variant = variant_tags[0].split("=", 1)[1]
else:
raise TooManyVariantTagsError("Found more than one variant tags.")
return variant
@total_ordering
class PluginInfo:
"""Parse and use plugin information like name, variant and version."""
PLUGIN_FILENAME_REGEX = re.compile(
rf"^.*plugin-({ALLOWED_PLUGIN_NAME})(?:-variant-({ALLOWED_VARIANT}))?-({ALLOWED_VERSION})\.tar\.gz$"
)
FOUR_DIGIT_VERSION_REGEX = re.compile(r"([0-9]+)\.([0-9]+)\.([0-9]+)\.([0-9]+)")
DEV_VERSION_REGEX = re.compile(r"([0-9]+)\.([0-9]+)\.([0-9]+)dev([0-9]+)")
def __init__(
self,
s3_bucket: T.Optional[str],
s3_path: T.Optional[str],
name: str,
version: str,
variant: T.Optional[str],
):
"""Private constructor. Use make_* factory methods instead."""
self.s3_bucket = s3_bucket
self.s3_path = s3_path
self.name = name
self.version = version
self.semver = self._parse_version(self.version)
self.variant = "" if variant is None else variant
self.extras: T.Dict[str, str] = {}
def __eq__(self, other) -> bool:
if isinstance(other, PluginInfo):
return (
self.name == other.name
and self.semver == other.semver
and self.variant == other.variant
)
return NotImplemented
def __lt__(self, other) -> bool:
if isinstance(other, PluginInfo):
self_values = (self.name, self.variant, self.semver)
other_values = (other.name, other.variant, other.semver)
return self_values < other_values
return NotImplemented
def __str__(self):
return self.get_filename()
def __repr__(self):
return (
f"PluginInfo({self.s3_bucket!r}, {self.s3_path!r}, "
f"{self.name!r}, {self.variant!r}, {self.version!r})"
)
@classmethod
def get_name_variant_version_from_filename(cls, filename):
m = cls.PLUGIN_FILENAME_REGEX.match(str(filename))
if m is None:
raise ValueError(f"Unable to parse: {filename}")
return m.group(1), m.group(2), m.group(3) # (name, variant, version)
@classmethod
def make_from_name_version(cls, name, version) -> PluginInfo:
warn(
"Use make_from_name_variant_version() instead.",
category=DeprecationWarning,
)
return cls(None, None, name, version, variant=None)
@classmethod
def make_from_name_variant_version(cls, name, variant, version) -> PluginInfo:
return cls(None, None, name, version, variant=variant)
@classmethod
def make_from_filename(cls, filename: T.Union[str, Path]) -> PluginInfo:
name, variant, version = cls.get_name_variant_version_from_filename(filename)
return cls(None, None, name, version, variant=variant)
@classmethod
def make_from_s3(cls, s3_bucket, s3_path):
name, variant, version = cls.get_name_variant_version_from_filename(s3_path)
s3_path_without_filename = "/".join(s3_path.split("/")[:-1])
return cls(s3_bucket, s3_path_without_filename, name, version, variant=variant)
@classmethod
def make_from_spec(cls, spec: PluginSpec) -> PluginInfo:
if spec.variant is PluginSpec.ANY_VARIANT:
raise ValueError("Cannot make PluginInfo from PluginSpec with ANY variant")
return cls(None, None, spec.name, spec.version_prefix, str(spec.variant))
@classmethod
def _parse_version(cls, version):
# Consider a 4th digit to be a SemVer pre-release.
# E.g. 1.2.3.4 is 1.2.3-4
m = cls.FOUR_DIGIT_VERSION_REGEX.match(version)
if m:
major, minor, patch, prerelease = m.groups()
return semver.VersionInfo(
major=major, minor=minor, patch=patch, prerelease=prerelease
)
# Consider a "dev" build to be a SemVer pre-release.
# E.g. 0.0.209dev12 is 0.0.209-12
m = cls.DEV_VERSION_REGEX.match(version)
if m:
major, minor, patch, prerelease = m.groups()
return semver.VersionInfo(
major=major, minor=minor, patch=patch, prerelease=prerelease
)
# Otherwise hope that the semver package can deal with it.
try:
return semver.VersionInfo.parse(version)
except ValueError as e:
lib.log_error(str(e))
# At least return something comparable.
return semver.VersionInfo(major=0)
def formatted_version(self) -> str:
        version, semver_str = self.version, str(self.semver)
        return semver_str if semver_str == version else f"{version} ({semver_str})"
def name_and_variant(self) -> str:
variant_str = f" [{self.variant}]" if self.variant else ""
return f"{self.name}{variant_str}"
def get_filename(self) -> str:
variant = f"-variant-{self.variant}" if self.variant else ""
return f"plugin-{self.name}{variant}-{self.version}.tar.gz"
def get_s3_bucket(self) -> T.Optional[str]:
return self.s3_bucket
def get_s3_path(self) -> T.Optional[str]:
return self.s3_path
def get_s3_name(self) -> str:
if self.s3_path:
return f"{self.s3_path}/{self.get_filename()}"
else:
# In the unlikely scenario that plugin files are stored flat in a bucket.
return self.get_filename()
class PluginInfos:
"""Container for one or more PluginInfo."""
def __init__(self, plugin_infos: T.Iterable[PluginInfo]):
self.pis = list(plugin_infos)
def __iter__(self):
return iter(self.pis)
def __repr__(self):
return f"PluginInfos({self.pis!r})"
@staticmethod
def make_from_local_store(plugins_local_dir):
result = plugins_local_dir.glob("plugin-*-*.tar.gz")
pis = []
for p in result:
try:
pis.append(PluginInfo.make_from_filename(p))
except ValueError as e:
lib.log_error(str(e))
return PluginInfos(pis)
@staticmethod
def make_from_s3_buckets(plugins_s3_buckets):
try:
return PluginInfos(
[
PluginInfo.make_from_s3(bucket, x["Key"])
for bucket, x in s3.list_buckets(plugins_s3_buckets)
if x["Key"].endswith(".tar.gz")
]
)
except s3.S3Error as e:
lib.log_error(str(e), abort=True)
return None # Never reached, but keep linters happy
@staticmethod
def make_from_encapsia(host: str) -> PluginInfos:
# TODO: use pluginsmanager.plugins() if it exists
api = lib.get_api(host=host)
raw_info = api.run_view(
"pluginsmanager",
"installed_plugins_with_tags",
)
pis = []
for i in raw_info:
tags = i.get("plugin_tags")
if not isinstance(tags, list):
tags = []
try:
variant = get_variant_from_tags(tags)
except TooManyVariantTagsError as e:
lib.log_error(f"Error in {i['name']} tag list: {e}")
pi = PluginInfo.make_from_name_variant_version(
i["name"], variant, i["version"]
)
pi.extras.update(
{
"description": i["description"],
"installed": _format_datetime(i["when"]),
"plugin-tags": ", ".join(sorted(tags)),
}
)
pis.append(pi)
return PluginInfos(pis)
def latest(self) -> T.Optional[PluginInfo]:
"""Returns greatest PluginInfo with in sort order (name, variant, version).
Careful: this has little value when comparing plugins with different name and
variant!
"""
return max(self.pis, default=None)
def filter_to_latest(self) -> PluginInfos:
groupped_pis = collections.defaultdict(list)
for pi in self.pis:
groupped_pis[(pi.name, pi.variant)].append(pi)
return PluginInfos(
p
for pis in groupped_pis.values()
if (p := PluginInfos(pis).latest()) is not None
)
def latest_version_matching_spec(self, spec) -> T.Optional[PluginInfo]:
return PluginSpec.make_from_spec_or_string(spec).filter(self).latest()
@dataclass
class PluginSpec:
name: str
variant: T_Variant
version_prefix: str = ""
exact_match: bool = False
PLUGIN_SPEC_NVV_REGEX: T.ClassVar[re.Pattern] = re.compile(
rf"^({ALLOWED_PLUGIN_NAME})(?:-variant-({ALLOWED_VARIANT}))?(?:-({ALLOWED_VERSION}))?$"
)
PLUGIN_SPEC_ANY_REGEX: T.ClassVar[re.Pattern] = re.compile(
rf"^({ALLOWED_PLUGIN_NAME})(?i:-ANY)(?:-({ALLOWED_VERSION}))?$"
)
ANY_VARIANT: T.ClassVar[T_Variant] = T_AnyVariant(object())
def __post_init__(self):
if self.variant is None:
self.variant = ""
if self.version_prefix is None:
self.version_prefix = ""
def __str__(self):
if self.variant is self.ANY_VARIANT:
variant = "-ANY"
elif self.variant:
variant = f"-variant-{self.variant}"
else:
variant = ""
if self.version_prefix:
version = f"-{self.version_prefix}"
else:
version = ""
if self.exact_match:
exact = " [exact]"
else:
exact = ""
return f"{self.name}{variant}{version}{exact}"
@classmethod
def _split_spec_string(cls, spec_string: str) -> T.Tuple[str, T_Variant, str]:
"""Split `spec_string` into components. A spec string can take three forms:
* <plugin_name>
* <plugin_name>-ANY ("ANY" is case insensitive)
* <plugin_name>-<version_prefix>
* <plugin_name>-variant-<variant_name>-<version_prefix>
"""
m = cls.PLUGIN_SPEC_NVV_REGEX.match(spec_string)
if m:
return m.group(1), m.group(2), m.group(3) # name, variant, version_prefix
m = cls.PLUGIN_SPEC_ANY_REGEX.match(spec_string)
if m:
return m.group(1), cls.ANY_VARIANT, m.group(2) # name, ANY, version
raise InvalidSpecError(f"Spec string {spec_string} is invalid.")
@classmethod
def make_from_string(
cls, spec_string: str, exact_match: bool = False
) -> PluginSpec:
return cls(*cls._split_spec_string(spec_string), exact_match=exact_match)
@classmethod
def make_from_spec_or_string(
cls, spec_or_string: T.Union[str, PluginSpec]
) -> PluginSpec:
if isinstance(spec_or_string, str):
instance = cls.make_from_string(spec_or_string)
elif isinstance(spec_or_string, PluginSpec):
instance = cls(
spec_or_string.name,
spec_or_string.variant,
spec_or_string.version_prefix,
exact_match=spec_or_string.exact_match,
)
else:
raise TypeError(f"Unknown spec type {type(spec_or_string)}")
return instance
@classmethod
def make_from_plugininfo(
cls, plugininfo: PluginInfo, exact_match: bool = True
) -> PluginSpec:
return cls(
plugininfo.name,
plugininfo.variant,
plugininfo.version,
exact_match=exact_match,
)
def _variant_match(self, pi: PluginInfo) -> bool:
return self.variant is self.ANY_VARIANT or self.variant == pi.variant
def _version_match(self, pi: PluginInfo) -> bool:
return (
self.version_prefix == pi.version
if self.exact_match
else pi.version.startswith(self.version_prefix)
)
def match(self, pi: PluginInfo) -> bool:
return (
self.name == pi.name and self._variant_match(pi) and self._version_match(pi)
)
def filter(self, plugin_infos: PluginInfos) -> PluginInfos:
return PluginInfos(filter(self.match, plugin_infos))
def as_version_dict(self) -> T_VersionDict:
version_dict: T_VersionDict
if self.variant:
if self.exact_match:
version_dict = {
self.name: {"version": self.version_prefix, "variant": self.variant}
}
else:
version_dict = {
self.name: {
"version": self.version_prefix,
"variant": self.variant,
"exact": False,
}
}
elif self.exact_match:
version_dict = {self.name: self.version_prefix}
else:
version_dict = {self.name: {"version": self.version_prefix, "exact": False}}
return version_dict
class PluginSpecs:
def __init__(self, specs: T.Iterable[PluginSpec]):
self.specs = [PluginSpec.make_from_spec_or_string(s) for s in specs]
def __iter__(self):
return iter(self.specs)
def __repr__(self):
return f"PluginSpecs({self.specs!r})"
@classmethod
def make_from_version_dict(cls, versions: dict) -> PluginSpecs:
specs = []
for name, definition in versions.items():
if isinstance(definition, str):
version, variant, exact = definition, "", True
elif isinstance(definition, dict):
version = definition["version"]
variant = definition.get("variant", "")
exact = definition.get("exact", True)
else:
raise TypeError(f"Unknown definition type {type(definition)}")
specs.append(PluginSpec(name, variant, version, exact_match=exact))
return cls(specs)
@classmethod
def make_from_spec_strings(cls, spec_strings: T.Iterable[str]) -> PluginSpecs:
return cls(
PluginSpec.make_from_string(spec_string) for spec_string in spec_strings
)
@classmethod
def make_from_plugininfos(cls, plugin_infos: PluginInfos) -> PluginSpecs:
return cls([PluginSpec.make_from_plugininfo(pi) for pi in plugin_infos])
def as_version_dict(self) -> T_VersionDict:
return {k: v for s in self for k, v in s.as_version_dict().items()}
def as_plugininfos(self) -> PluginInfos:
return PluginInfos(
[
PluginInfo.make_from_name_variant_version(
s.name, s.variant, s.version_prefix
)
for s in self
]
)
def match_any(self, plugin_info: PluginInfo) -> bool:
return any(s.match(plugin_info) for s in self)
def filter(self, plugin_infos: PluginInfos) -> PluginInfos:
return PluginInfos(filter(self.match_any, plugin_infos))
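# Spec-string parsing sketch (plugin names and versions below are illustrative):
#
#     PluginSpec.make_from_string("launch")                     # just a name
#     PluginSpec.make_from_string("launch-1.2")                 # name and version prefix
#     PluginSpec.make_from_string("launch-variant-demo-1.2.3")  # name, variant and version
#     PluginSpec.make_from_string("launch-ANY")                 # name, matching any variant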
| 34.449462
| 108
| 0.601661
|
2b6dba45b5c19209feb9daa13f3142d50bc022b8
| 2,283
|
py
|
Python
|
daisychain/channel_dropbox/migrations/0001_initial.py
|
daisychainme/daisychain
|
245d0041f1efd2d6cc110f60aebf2e2dee98bcdb
|
[
"MIT"
] | 5
|
2016-09-27T10:44:59.000Z
|
2022-03-29T08:16:44.000Z
|
daisychain/channel_dropbox/migrations/0001_initial.py
|
daisychainme/daisychain
|
245d0041f1efd2d6cc110f60aebf2e2dee98bcdb
|
[
"MIT"
] | null | null | null |
daisychain/channel_dropbox/migrations/0001_initial.py
|
daisychainme/daisychain
|
245d0041f1efd2d6cc110f60aebf2e2dee98bcdb
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-08-17 09:06
# Generated by Django 1.9.6 on 2016-07-14 17:19
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='DropboxAccount',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('access_token', models.CharField(max_length=255, verbose_name='Access Token')),
('cursor', models.CharField(max_length=255, verbose_name='Cursor')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'Dropbox Account',
'verbose_name_plural': 'Dropbox Accounts',
},
),
migrations.CreateModel(
name='DropboxUser',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('dropbox_userid', models.CharField(max_length=255, verbose_name='DropBox User ID')),
('display_name', models.CharField(max_length=100, verbose_name='DropBox Display Name')),
('email', models.CharField(max_length=100, verbose_name='email')),
('profile_photo_url', models.CharField(max_length=255, null=True, verbose_name='email')),
('disk_used', models.DecimalField(decimal_places=4, max_digits=12, verbose_name='Used Disk Space')),
('disk_allocated', models.DecimalField(decimal_places=4, max_digits=12, verbose_name='Total Allocated Disk Usage')),
('dropbox_account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='channel_dropbox.DropboxAccount')),
],
options={
'verbose_name': 'Dropbox User',
'verbose_name_plural': 'Dropbox Users',
},
),
]
| 43.075472
| 137
| 0.622427
|
7f123bf95e68a4532e05a08101442765f72e7492
| 717
|
py
|
Python
|
netnir/core/tasks/inventory.py
|
jtdub/netnir
|
9d2c3467cf558895af16cd2450198d51f8c4a3d4
|
[
"MIT"
] | null | null | null |
netnir/core/tasks/inventory.py
|
jtdub/netnir
|
9d2c3467cf558895af16cd2450198d51f8c4a3d4
|
[
"MIT"
] | null | null | null |
netnir/core/tasks/inventory.py
|
jtdub/netnir
|
9d2c3467cf558895af16cd2450198d51f8c4a3d4
|
[
"MIT"
] | 1
|
2021-04-09T18:06:08.000Z
|
2021-04-09T18:06:08.000Z
|
from netnir.helpers.scaffold.command import CommandScaffold
class Inventory(CommandScaffold):
"""
cli based inventory search
"""
def run(self):
from netnir.plugins.facts import inventory_facts
from nornir.plugins.functions.text import print_result
self.nr = self._inventory()
results = self.nr.run(
task=inventory_facts,
name="INVENTORY FACTS",
num_workers=self.args.workers,
dry_run=self.args.X,
severity_level=self._verbose()["level"],
to_console=self._verbose()["to_console"],
)
print_result(result=results, severity_level=self._verbose()["level"])
return results
| 27.576923
| 77
| 0.631799
|
b9ed0661bbe2109d68df279dac25da8ce61ce56b
| 136
|
py
|
Python
|
graph_ter_seg/__init__.py
|
RicardoLanJ/graph-ter
|
3b9bda527a6a9559be835c5b84e6491ac8c5aa30
|
[
"MIT"
] | 58
|
2020-03-24T16:06:21.000Z
|
2022-03-26T07:04:28.000Z
|
graph_ter_seg/__init__.py
|
RicardoLanJ/graph-ter
|
3b9bda527a6a9559be835c5b84e6491ac8c5aa30
|
[
"MIT"
] | 6
|
2020-04-02T08:52:37.000Z
|
2020-11-27T12:27:23.000Z
|
graph_ter_seg/__init__.py
|
RicardoLanJ/graph-ter
|
3b9bda527a6a9559be835c5b84e6491ac8c5aa30
|
[
"MIT"
] | 19
|
2020-03-29T18:23:55.000Z
|
2021-12-25T04:10:00.000Z
|
from graph_ter_seg import dataset
from graph_ter_seg import models
from graph_ter_seg import tools
from graph_ter_seg import transforms
| 27.2
| 36
| 0.882353
|
e07c0d1a70a2aea0ab8cda1008fa6e6d5db4ca45
| 1,270
|
py
|
Python
|
setup.py
|
m3at/backlight
|
a80ccbe6193e1fc4c169df2ebc2bf49876d4d39d
|
[
"MIT"
] | null | null | null |
setup.py
|
m3at/backlight
|
a80ccbe6193e1fc4c169df2ebc2bf49876d4d39d
|
[
"MIT"
] | null | null | null |
setup.py
|
m3at/backlight
|
a80ccbe6193e1fc4c169df2ebc2bf49876d4d39d
|
[
"MIT"
] | null | null | null |
import setuptools
import sys
import os
from glob import glob
# hack to extract metadata directly from the python package
sys.path.append("src") # noqa
from backlight import __author__, __version__, __license__
def read(fname):
with open(fname, "r", encoding="utf-8") as fh:
long_description = fh.read()
return long_description
setuptools.setup(
name="backlight",
version=__version__,
description="Model evaluation framework for AlpacaForecast",
author=__author__,
author_email="info@alpaca.ai",
long_description=read("README.md"),
long_description_content_type="text/markdown",
license=__license__,
url="https://github.com/AlpacaDB/backlight.git",
keywords="",
packages=setuptools.find_packages("src"),
package_dir={"": "src"},
py_modules=[
os.path.splitext(os.path.basename(path))[0] for path in glob("src/*.py")
],
install_requires=[
"pandas==0.21.0",
"numpy>=1.15.0",
"matplotlib>=2.2.2",
"boto3>=1.9.36",
],
tests_require=[
"pytest-cov>=2.5.1",
"pytest-mock>=1.7.1",
"pytest-flake8>=1.0.0",
"pytest-sugar>=0.9.1",
"pytest>=3.5.0",
"autopep8>=1.2.3",
"flake8>=3.5.0",
],
)
| 25.918367
| 80
| 0.62126
|
01f8a510e014d2f94c5ccf197e04233480c2a2f0
| 3,069
|
py
|
Python
|
invenio_records_lom/ext.py
|
tu-graz-library/invenio-records-lom
|
c811506e51a1ed15d11cf10d6e6ef83a4ecc202b
|
[
"MIT"
] | null | null | null |
invenio_records_lom/ext.py
|
tu-graz-library/invenio-records-lom
|
c811506e51a1ed15d11cf10d6e6ef83a4ecc202b
|
[
"MIT"
] | 18
|
2020-10-21T07:58:14.000Z
|
2022-03-29T12:10:25.000Z
|
invenio_records_lom/ext.py
|
tu-graz-library/invenio-records-lom
|
c811506e51a1ed15d11cf10d6e6ef83a4ecc202b
|
[
"MIT"
] | 7
|
2020-10-06T08:46:40.000Z
|
2021-07-06T13:21:29.000Z
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 Graz University of Technology.
#
# invenio-records-lom is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Flask extension for invenio-records-lom."""
from __future__ import absolute_import, print_function
from werkzeug.utils import cached_property
from . import config
from .services import LOMRecordService, LOMRecordServiceConfig
class InvenioRecordsLOM(object):
"""invenio-records-lom extension."""
def __init__(self, app=None):
"""Extension initialization."""
if app:
self.init_app(app)
@cached_property
def lom_cls(self):
"""Base Lom API class."""
# TODO: Refactor
# def default_class_factory():
# from .api import LomRecordBase
# return type(
# 'InvenioRecordsLOM',
# (LomRecordBase),
# {},
# )
# return self.app.config['LOM_CLS'] or default_class_factory()
from .api import LomRecordBase
return type(
"Lom",
(LomRecordBase,),
{},
)
def init_app(self, app):
"""Flask application initialization."""
self.init_config(app)
self.init_services(app)
app.extensions["invenio-records-lom"] = self
def init_config(self, app):
"""Initialize configuration.
Override configuration variables with the values in this package.
"""
for k in dir(config):
if k.startswith("LOM_"):
if k == "LOM_REST_ENDPOINTS":
# Make sure of registration process.
app.config.setdefault("RECORDS_REST_ENDPOINTS", {})
app.config["RECORDS_REST_ENDPOINTS"].update(getattr(config, k))
if k == "LOM_REST_FACETS":
app.config.setdefault("RECORDS_REST_FACETS", {})
app.config["RECORDS_REST_FACETS"].update(getattr(config, k))
app.config.setdefault(k, getattr(config, k))
if k == "LOM_REST_SORT_OPTIONS":
# TODO Might be overriden depending on which package is
# initialised first
app.config.setdefault("RECORDS_REST_SORT_OPTIONS", {})
app.config["RECORDS_REST_SORT_OPTIONS"].update(getattr(config, k))
if k == "LOM_REST_DEFAULT_SORT":
# TODO Might be overriden depending on which package is
# initialised first
app.config.setdefault("RECORDS_REST_DEFAULT_SORT", {})
app.config["RECORDS_REST_DEFAULT_SORT"].update(getattr(config, k))
def init_services(self, app):
"""Initialize Services."""
service_config = (
LOMRecordServiceConfig # config_class as in invenio-RDM and invenio-MARC21
)
self.records_service = LOMRecordService(
service_config,
)
| 34.1
| 87
| 0.584881
|
de567ea5dfc1ee2c201d988d20a0aef51fe7c817
| 2,425
|
py
|
Python
|
python/dp/partition.py
|
aocsa/cs_hackers_hub
|
b3c42932260720c60e2cc9a5d552f3987a45fa21
|
[
"Apache-2.0"
] | 4
|
2021-03-04T02:45:26.000Z
|
2021-05-28T22:51:45.000Z
|
python/dp/partition.py
|
aocsa/cs_hackers_hub
|
b3c42932260720c60e2cc9a5d552f3987a45fa21
|
[
"Apache-2.0"
] | 1
|
2021-03-14T16:12:32.000Z
|
2021-03-14T16:12:32.000Z
|
python/dp/partition.py
|
aocsa/cs_hackers_hub
|
b3c42932260720c60e2cc9a5d552f3987a45fa21
|
[
"Apache-2.0"
] | 5
|
2021-03-16T05:20:48.000Z
|
2021-03-24T01:46:28.000Z
|
# Problem Statement
#
# Given a set of positive numbers, find if we can partition it into two subsets such that the
# sum of elements in both subsets is equal.
# Input:  {1, 2, 3, 4}
# Output: True   -> {1, 4} & {2, 3}, both sum to 5
# Input:  {1, 1, 3, 4, 7}
# Output: True   -> {1, 3, 4} & {1, 7}, both sum to 8
# Input:  {2, 3, 4, 6}
# Output: False  -> the total is 15, which is odd, so no equal split exists
#
# Recursive formulation, e.g. can_partition_rec([1, 2, 3, 4], target=5):
# base case 1: f(A=[], x)    -> False  (no elements left, target not reached)
# base case 2: f(A=[...], 0) -> True   (remaining target is zero)
# recursive case:
# f(A, x) = f(A[1:], x - A[0]) or f(A[1:], x)   # include A[0], or skip it
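# One successful branch of the search, for can_partition_rec([1, 2, 3, 4], x=5):
#   f([1, 2, 3, 4], 5) -> skip 1 -> f([2, 3, 4], 5)
#   f([2, 3, 4], 5)    -> take 2 -> f([3, 4], 3)
#   f([3, 4], 3)       -> take 3 -> f([4], 0)  -> x == 0, so True
#   i.e. {2, 3} sums to 5 and the remaining {1, 4} makes up the other half.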
def can_partition_rec(A, x):
if len(A) == 0:
return False
if x == 0:
return True
f1 = False
if x >= A[0]:
f1 = can_partition_rec(A[1:], x - A[0])
f2 = can_partition_rec(A[1:], x)
return f1 or f2
def can_partition(elems):
s = sum(elems)
if s % 2 != 0:
return False
    return can_partition_rec(elems, s // 2)
def can_partition_rec_2(A, x, path):
if len(A) == 0:
return False
if x == 0:
print(path)
return True
f1 = False
if x >= A[0]:
f1 = can_partition_rec_2(A[1:], x - A[0], path + [A[0]])
f2 = can_partition_rec_2(A[1:], x, path)
return f1 or f2
# This variant additionally records one valid subset via `path` and prints it; it is still plain
# recursion, memoization is only added in the Partition class below.
def can_partition_dp(elems):
    print("problem: ", elems)
s = sum(elems)
if s % 2 != 0:
return False
return can_partition_rec_2(elems, s // 2, [])
class Partition:
def __init__(self):
self.memo = {}
        self.path = []  # holds one subset reaching the target sum, once found
def can_partition_rec_dp(self, A, x, path):
key = (len(A), x)
if key in self.memo:
return self.memo[key]
if len(A) == 0:
return False
if x == 0:
self.path = path
return True
f1 = False
if x >= A[0]:
f1 = self.can_partition_rec_dp(A[1:], x - A[0], path + [A[0]])
f2 = self.can_partition_rec_dp(A[1:], x, path)
self.memo[key] = f1 or f2
return f1 or f2
def run(self, elems):
self.memo = {}
        self.path = []
s = sum(elems)
if s % 2 != 0:
return False
out = self.can_partition_rec_dp(elems, s // 2, [])
other = elems.copy()
for o in self.path:
other.pop(other.index(o))
print(self.path, other)
return out
s = Partition()
print(s.run([1, 2, 3, 4])) # True
print(s.run([1, 1, 3, 4, 7])) # True
print(s.run([2, 3, 4, 6])) # False
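# An alternative, illustrative bottom-up sketch of the same check: dp[t] is True when some subset
# of the elements seen so far sums to t; iterating t downwards ensures each element is used at most once.
def can_partition_tabulated(elems):
    total = sum(elems)
    if total % 2 != 0:
        return False
    target = total // 2
    dp = [False] * (target + 1)
    dp[0] = True  # the empty subset sums to 0
    for e in elems:
        for t in range(target, e - 1, -1):
            dp[t] = dp[t] or dp[t - e]
    return dp[target]
print(can_partition_tabulated([1, 2, 3, 4]))     # True
print(can_partition_tabulated([1, 1, 3, 4, 7]))  # True
print(can_partition_tabulated([2, 3, 4, 6]))     # False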
| 23.317308
| 135
| 0.512577
|
360a60ec1f62cf4a79a3d271b8ff147f9cfe2645
| 585
|
py
|
Python
|
model_mapreduce_job/config.py
|
schmidtbri/map-reduce-ml-model-deployment
|
84d4adbedd1ae8b76a3626a13dc8b98f30bb3ee9
|
[
"MIT"
] | null | null | null |
model_mapreduce_job/config.py
|
schmidtbri/map-reduce-ml-model-deployment
|
84d4adbedd1ae8b76a3626a13dc8b98f30bb3ee9
|
[
"MIT"
] | null | null | null |
model_mapreduce_job/config.py
|
schmidtbri/map-reduce-ml-model-deployment
|
84d4adbedd1ae8b76a3626a13dc8b98f30bb3ee9
|
[
"MIT"
] | null | null | null |
"""Configuration for the job."""
class Config(dict):
"""Configuration for all environments."""
models = [
{
"module_name": "iris_model.iris_predict",
"class_name": "IrisModel"
}
]
class ProdConfig(Config):
"""Configuration for the prod environment."""
pass
class BetaConfig(Config):
"""Configuration for the beta environment."""
pass
class TestConfig(Config):
"""Configuration for the test environment."""
pass
class DevConfig(Config):
"""Configuration for the dev environment."""
pass
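# A minimal selection sketch (illustrative; the "APP_ENV" environment variable is an assumption,
# not part of this module): pick one of the environment classes above by name.
#
#     import os
#     _configs = {"prod": ProdConfig, "beta": BetaConfig, "test": TestConfig, "dev": DevConfig}
#     config = _configs.get(os.environ.get("APP_ENV", "dev"), DevConfig)()
#     models = config.models  # inherited from Config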
| 15.810811
| 53
| 0.615385
|
1114f110a7092724c1da557123b8fef2cc5cd852
| 14
|
py
|
Python
|
lib/__init__.py
|
RimuEirnarn/GTRN_Client
|
54bd7b27ff12880272ba1c631066a3cdd733a6bd
|
[
"BSD-3-Clause"
] | 1
|
2021-04-19T19:50:07.000Z
|
2021-04-19T19:50:07.000Z
|
lib/__init__.py
|
RimuEirnarn/GTRN_Client
|
54bd7b27ff12880272ba1c631066a3cdd733a6bd
|
[
"BSD-3-Clause"
] | null | null | null |
lib/__init__.py
|
RimuEirnarn/GTRN_Client
|
54bd7b27ff12880272ba1c631066a3cdd733a6bd
|
[
"BSD-3-Clause"
] | null | null | null |
"""Library"""
| 7
| 13
| 0.5
|
5019ee9acc028c6f5d882c686caece7cccf31d96
| 21,668
|
py
|
Python
|
model/backbones/timm/models/levit.py
|
maxingan2412/CIL-ReID
|
0307ca57a5da9cfcb30fc3810469b0a953c77bfb
|
[
"MIT"
] | 58
|
2021-09-02T13:09:57.000Z
|
2022-03-20T06:44:56.000Z
|
model/backbones/timm/models/levit.py
|
maxingan2412/CIL-ReID
|
0307ca57a5da9cfcb30fc3810469b0a953c77bfb
|
[
"MIT"
] | 5
|
2021-11-19T06:38:43.000Z
|
2022-03-31T13:48:11.000Z
|
model/backbones/timm/models/levit.py
|
maxingan2412/CIL-ReID
|
0307ca57a5da9cfcb30fc3810469b0a953c77bfb
|
[
"MIT"
] | 12
|
2021-11-03T21:32:12.000Z
|
2022-03-19T07:55:08.000Z
|
""" LeViT
Paper: `LeViT: a Vision Transformer in ConvNet's Clothing for Faster Inference`
- https://arxiv.org/abs/2104.01136
@article{graham2021levit,
title={LeViT: a Vision Transformer in ConvNet's Clothing for Faster Inference},
author={Benjamin Graham and Alaaeldin El-Nouby and Hugo Touvron and Pierre Stock and Armand Joulin and Herv\'e J\'egou and Matthijs Douze},
    journal={arXiv preprint arXiv:2104.01136},
year={2021}
}
Adapted from official impl at https://github.com/facebookresearch/LeViT, original copyright below.
This version combines both conv/linear models and fixes torchscript compatibility.
Modifications by / Copyright 2021 Ross Wightman
"""
# Copyright (c) 2015-present, Facebook, Inc.
# All rights reserved.
# Modified from
# https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/vision_transformer.py
# Copyright 2020 Ross Wightman, Apache-2.0 License
import itertools
from copy import deepcopy
from functools import partial
from typing import Dict
import torch
import torch.nn as nn
from timm.data import IMAGENET_DEFAULT_STD, IMAGENET_DEFAULT_MEAN
from .helpers import build_model_with_cfg, overlay_external_default_cfg
from .layers import to_ntuple
from .vision_transformer import trunc_normal_
from .registry import register_model
def _cfg(url='', **kwargs):
return {
'url': url,
'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None,
'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True,
'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,
'first_conv': 'patch_embed.0.c', 'classifier': ('head.l', 'head_dist.l'),
**kwargs
}
default_cfgs = dict(
levit_128s=_cfg(
url='https://dl.fbaipublicfiles.com/LeViT/LeViT-128S-96703c44.pth'
),
levit_128=_cfg(
url='https://dl.fbaipublicfiles.com/LeViT/LeViT-128-b88c2750.pth'
),
levit_192=_cfg(
url='https://dl.fbaipublicfiles.com/LeViT/LeViT-192-92712e41.pth'
),
levit_256=_cfg(
url='https://dl.fbaipublicfiles.com/LeViT/LeViT-256-13b5763e.pth'
),
levit_384=_cfg(
url='https://dl.fbaipublicfiles.com/LeViT/LeViT-384-9bdaf2e2.pth'
),
)
model_cfgs = dict(
levit_128s=dict(
embed_dim=(128, 256, 384), key_dim=16, num_heads=(4, 6, 8), depth=(2, 3, 4)),
levit_128=dict(
embed_dim=(128, 256, 384), key_dim=16, num_heads=(4, 8, 12), depth=(4, 4, 4)),
levit_192=dict(
embed_dim=(192, 288, 384), key_dim=32, num_heads=(3, 5, 6), depth=(4, 4, 4)),
levit_256=dict(
embed_dim=(256, 384, 512), key_dim=32, num_heads=(4, 6, 8), depth=(4, 4, 4)),
levit_384=dict(
embed_dim=(384, 512, 768), key_dim=32, num_heads=(6, 9, 12), depth=(4, 4, 4)),
)
__all__ = ['Levit']
@register_model
def levit_128s(pretrained=False, fuse=False, distillation=True, use_conv=False, **kwargs):
return create_levit(
'levit_128s', pretrained=pretrained, fuse=fuse, distillation=distillation, use_conv=use_conv, **kwargs)
@register_model
def levit_128(pretrained=False, fuse=False, distillation=True, use_conv=False, **kwargs):
return create_levit(
'levit_128', pretrained=pretrained, fuse=fuse, distillation=distillation, use_conv=use_conv, **kwargs)
@register_model
def levit_192(pretrained=False, fuse=False, distillation=True, use_conv=False, **kwargs):
return create_levit(
'levit_192', pretrained=pretrained, fuse=fuse, distillation=distillation, use_conv=use_conv, **kwargs)
@register_model
def levit_256(pretrained=False, fuse=False, distillation=True, use_conv=False, **kwargs):
return create_levit(
'levit_256', pretrained=pretrained, fuse=fuse, distillation=distillation, use_conv=use_conv, **kwargs)
@register_model
def levit_384(pretrained=False, fuse=False, distillation=True, use_conv=False, **kwargs):
return create_levit(
'levit_384', pretrained=pretrained, fuse=fuse, distillation=distillation, use_conv=use_conv, **kwargs)
@register_model
def levit_c_128s(pretrained=False, fuse=False, distillation=True, use_conv=True, **kwargs):
return create_levit(
'levit_128s', pretrained=pretrained, fuse=fuse, distillation=distillation, use_conv=use_conv, **kwargs)
@register_model
def levit_c_128(pretrained=False, fuse=False, distillation=True, use_conv=True, **kwargs):
return create_levit(
'levit_128', pretrained=pretrained, fuse=fuse, distillation=distillation, use_conv=use_conv, **kwargs)
@register_model
def levit_c_192(pretrained=False, fuse=False, distillation=True, use_conv=True, **kwargs):
return create_levit(
'levit_192', pretrained=pretrained, fuse=fuse, distillation=distillation, use_conv=use_conv, **kwargs)
@register_model
def levit_c_256(pretrained=False, fuse=False, distillation=True, use_conv=True, **kwargs):
return create_levit(
'levit_256', pretrained=pretrained, fuse=fuse, distillation=distillation, use_conv=use_conv, **kwargs)
@register_model
def levit_c_384(pretrained=False, fuse=False, distillation=True, use_conv=True, **kwargs):
return create_levit(
'levit_384', pretrained=pretrained, fuse=fuse, distillation=distillation, use_conv=use_conv, **kwargs)
class ConvNorm(nn.Sequential):
def __init__(
self, a, b, ks=1, stride=1, pad=0, dilation=1, groups=1, bn_weight_init=1, resolution=-10000):
super().__init__()
self.add_module('c', nn.Conv2d(a, b, ks, stride, pad, dilation, groups, bias=False))
bn = nn.BatchNorm2d(b)
nn.init.constant_(bn.weight, bn_weight_init)
nn.init.constant_(bn.bias, 0)
self.add_module('bn', bn)
@torch.no_grad()
def fuse(self):
c, bn = self._modules.values()
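        # Fold the (frozen) BatchNorm into the conv: with scale = bn.weight / sqrt(bn.running_var + eps),
        # the fused conv uses W' = W * scale (per output channel) and b' = bn.bias - bn.running_mean * scale,
        # which is exactly what the lines below compute.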
w = bn.weight / (bn.running_var + bn.eps) ** 0.5
w = c.weight * w[:, None, None, None]
b = bn.bias - bn.running_mean * bn.weight / (bn.running_var + bn.eps) ** 0.5
m = nn.Conv2d(
w.size(1), w.size(0), w.shape[2:], stride=self.c.stride,
padding=self.c.padding, dilation=self.c.dilation, groups=self.c.groups)
m.weight.data.copy_(w)
m.bias.data.copy_(b)
return m
class LinearNorm(nn.Sequential):
def __init__(self, a, b, bn_weight_init=1, resolution=-100000):
super().__init__()
self.add_module('c', nn.Linear(a, b, bias=False))
bn = nn.BatchNorm1d(b)
nn.init.constant_(bn.weight, bn_weight_init)
nn.init.constant_(bn.bias, 0)
self.add_module('bn', bn)
@torch.no_grad()
def fuse(self):
l, bn = self._modules.values()
w = bn.weight / (bn.running_var + bn.eps) ** 0.5
w = l.weight * w[:, None]
b = bn.bias - bn.running_mean * bn.weight / (bn.running_var + bn.eps) ** 0.5
m = nn.Linear(w.size(1), w.size(0))
m.weight.data.copy_(w)
m.bias.data.copy_(b)
return m
def forward(self, x):
x = self.c(x)
return self.bn(x.flatten(0, 1)).reshape_as(x)
class NormLinear(nn.Sequential):
def __init__(self, a, b, bias=True, std=0.02):
super().__init__()
self.add_module('bn', nn.BatchNorm1d(a))
l = nn.Linear(a, b, bias=bias)
trunc_normal_(l.weight, std=std)
if bias:
nn.init.constant_(l.bias, 0)
self.add_module('l', l)
@torch.no_grad()
def fuse(self):
bn, l = self._modules.values()
w = bn.weight / (bn.running_var + bn.eps) ** 0.5
b = bn.bias - self.bn.running_mean * self.bn.weight / (bn.running_var + bn.eps) ** 0.5
w = l.weight * w[None, :]
if l.bias is None:
b = b @ self.l.weight.T
else:
b = (l.weight @ b[:, None]).view(-1) + self.l.bias
m = nn.Linear(w.size(1), w.size(0))
m.weight.data.copy_(w)
m.bias.data.copy_(b)
return m
def stem_b16(in_chs, out_chs, activation, resolution=224):
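    # Four stride-2 convolutions: overall stride 16, so a 224x224 input yields 14x14 "patch" tokens.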
return nn.Sequential(
ConvNorm(in_chs, out_chs // 8, 3, 2, 1, resolution=resolution),
activation(),
ConvNorm(out_chs // 8, out_chs // 4, 3, 2, 1, resolution=resolution // 2),
activation(),
ConvNorm(out_chs // 4, out_chs // 2, 3, 2, 1, resolution=resolution // 4),
activation(),
ConvNorm(out_chs // 2, out_chs, 3, 2, 1, resolution=resolution // 8))
class Residual(nn.Module):
def __init__(self, m, drop):
super().__init__()
self.m = m
self.drop = drop
    def forward(self, x):
        if self.training and self.drop > 0:
            # Drop-path: randomly zero the residual branch per sample during training,
            # rescaling the surviving samples by 1 / (1 - drop) to keep expectations unchanged.
            return x + self.m(x) * torch.rand(
                x.size(0), 1, 1, device=x.device).ge_(self.drop).div(1 - self.drop).detach()
        else:
            return x + self.m(x)
class Subsample(nn.Module):
def __init__(self, stride, resolution):
super().__init__()
self.stride = stride
self.resolution = resolution
def forward(self, x):
B, N, C = x.shape
x = x.view(B, self.resolution, self.resolution, C)[:, ::self.stride, ::self.stride]
return x.reshape(B, -1, C)
class Attention(nn.Module):
ab: Dict[str, torch.Tensor]
def __init__(
self, dim, key_dim, num_heads=8, attn_ratio=4, act_layer=None, resolution=14, use_conv=False):
super().__init__()
self.num_heads = num_heads
self.scale = key_dim ** -0.5
self.key_dim = key_dim
self.nh_kd = nh_kd = key_dim * num_heads
self.d = int(attn_ratio * key_dim)
self.dh = int(attn_ratio * key_dim) * num_heads
self.attn_ratio = attn_ratio
self.use_conv = use_conv
ln_layer = ConvNorm if self.use_conv else LinearNorm
h = self.dh + nh_kd * 2
self.qkv = ln_layer(dim, h, resolution=resolution)
self.proj = nn.Sequential(
act_layer(),
ln_layer(self.dh, dim, bn_weight_init=0, resolution=resolution))
points = list(itertools.product(range(resolution), range(resolution)))
N = len(points)
attention_offsets = {}
idxs = []
for p1 in points:
for p2 in points:
offset = (abs(p1[0] - p2[0]), abs(p1[1] - p2[1]))
if offset not in attention_offsets:
attention_offsets[offset] = len(attention_offsets)
idxs.append(attention_offsets[offset])
self.attention_biases = nn.Parameter(torch.zeros(num_heads, len(attention_offsets)))
self.register_buffer('attention_bias_idxs', torch.LongTensor(idxs).view(N, N))
self.ab = {}
@torch.no_grad()
def train(self, mode=True):
super().train(mode)
if mode and self.ab:
self.ab = {} # clear ab cache
def get_attention_biases(self, device: torch.device) -> torch.Tensor:
if self.training:
return self.attention_biases[:, self.attention_bias_idxs]
else:
device_key = str(device)
if device_key not in self.ab:
self.ab[device_key] = self.attention_biases[:, self.attention_bias_idxs]
return self.ab[device_key]
def forward(self, x): # x (B,C,H,W)
if self.use_conv:
B, C, H, W = x.shape
q, k, v = self.qkv(x).view(B, self.num_heads, -1, H * W).split([self.key_dim, self.key_dim, self.d], dim=2)
attn = (q.transpose(-2, -1) @ k) * self.scale + self.get_attention_biases(x.device)
attn = attn.softmax(dim=-1)
x = (v @ attn.transpose(-2, -1)).view(B, -1, H, W)
else:
B, N, C = x.shape
qkv = self.qkv(x)
q, k, v = qkv.view(B, N, self.num_heads, -1).split([self.key_dim, self.key_dim, self.d], dim=3)
q = q.permute(0, 2, 1, 3)
k = k.permute(0, 2, 1, 3)
v = v.permute(0, 2, 1, 3)
attn = q @ k.transpose(-2, -1) * self.scale + self.get_attention_biases(x.device)
attn = attn.softmax(dim=-1)
x = (attn @ v).transpose(1, 2).reshape(B, N, self.dh)
x = self.proj(x)
return x
class AttentionSubsample(nn.Module):
ab: Dict[str, torch.Tensor]
def __init__(
self, in_dim, out_dim, key_dim, num_heads=8, attn_ratio=2,
act_layer=None, stride=2, resolution=14, resolution_=7, use_conv=False):
super().__init__()
self.num_heads = num_heads
self.scale = key_dim ** -0.5
self.key_dim = key_dim
self.nh_kd = nh_kd = key_dim * num_heads
self.d = int(attn_ratio * key_dim)
self.dh = self.d * self.num_heads
self.attn_ratio = attn_ratio
self.resolution_ = resolution_
self.resolution_2 = resolution_ ** 2
self.use_conv = use_conv
if self.use_conv:
ln_layer = ConvNorm
sub_layer = partial(nn.AvgPool2d, kernel_size=1, padding=0)
else:
ln_layer = LinearNorm
sub_layer = partial(Subsample, resolution=resolution)
h = self.dh + nh_kd
self.kv = ln_layer(in_dim, h, resolution=resolution)
self.q = nn.Sequential(
sub_layer(stride=stride),
ln_layer(in_dim, nh_kd, resolution=resolution_))
self.proj = nn.Sequential(
act_layer(),
ln_layer(self.dh, out_dim, resolution=resolution_))
self.stride = stride
self.resolution = resolution
points = list(itertools.product(range(resolution), range(resolution)))
points_ = list(itertools.product(range(resolution_), range(resolution_)))
N = len(points)
N_ = len(points_)
attention_offsets = {}
idxs = []
for p1 in points_:
for p2 in points:
size = 1
offset = (
abs(p1[0] * stride - p2[0] + (size - 1) / 2),
abs(p1[1] * stride - p2[1] + (size - 1) / 2))
if offset not in attention_offsets:
attention_offsets[offset] = len(attention_offsets)
idxs.append(attention_offsets[offset])
self.attention_biases = nn.Parameter(torch.zeros(num_heads, len(attention_offsets)))
self.register_buffer('attention_bias_idxs', torch.LongTensor(idxs).view(N_, N))
self.ab = {} # per-device attention_biases cache
@torch.no_grad()
def train(self, mode=True):
super().train(mode)
if mode and self.ab:
self.ab = {} # clear ab cache
def get_attention_biases(self, device: torch.device) -> torch.Tensor:
if self.training:
return self.attention_biases[:, self.attention_bias_idxs]
else:
device_key = str(device)
if device_key not in self.ab:
self.ab[device_key] = self.attention_biases[:, self.attention_bias_idxs]
return self.ab[device_key]
def forward(self, x):
if self.use_conv:
B, C, H, W = x.shape
k, v = self.kv(x).view(B, self.num_heads, -1, H * W).split([self.key_dim, self.d], dim=2)
q = self.q(x).view(B, self.num_heads, self.key_dim, self.resolution_2)
attn = (q.transpose(-2, -1) @ k) * self.scale + self.get_attention_biases(x.device)
attn = attn.softmax(dim=-1)
x = (v @ attn.transpose(-2, -1)).reshape(B, -1, self.resolution_, self.resolution_)
else:
B, N, C = x.shape
k, v = self.kv(x).view(B, N, self.num_heads, -1).split([self.key_dim, self.d], dim=3)
k = k.permute(0, 2, 1, 3) # BHNC
v = v.permute(0, 2, 1, 3) # BHNC
q = self.q(x).view(B, self.resolution_2, self.num_heads, self.key_dim).permute(0, 2, 1, 3)
attn = q @ k.transpose(-2, -1) * self.scale + self.get_attention_biases(x.device)
attn = attn.softmax(dim=-1)
x = (attn @ v).transpose(1, 2).reshape(B, -1, self.dh)
x = self.proj(x)
return x
class Levit(nn.Module):
""" Vision Transformer with support for patch or hybrid CNN input stage
"""
def __init__(
self,
img_size=224,
patch_size=16,
in_chans=3,
num_classes=1000,
embed_dim=(192,),
key_dim=64,
depth=(12,),
num_heads=(3,),
attn_ratio=2,
mlp_ratio=2,
hybrid_backbone=None,
down_ops=None,
act_layer=nn.Hardswish,
attn_act_layer=nn.Hardswish,
distillation=True,
use_conv=False,
drop_path=0):
super().__init__()
if isinstance(img_size, tuple):
# FIXME origin impl passes single img/res dim through whole hierarchy,
# not sure this model will be used enough to spend time fixing it.
assert img_size[0] == img_size[1]
img_size = img_size[0]
self.num_classes = num_classes
self.num_features = embed_dim[-1]
self.embed_dim = embed_dim
N = len(embed_dim)
assert len(depth) == len(num_heads) == N
key_dim = to_ntuple(N)(key_dim)
attn_ratio = to_ntuple(N)(attn_ratio)
mlp_ratio = to_ntuple(N)(mlp_ratio)
down_ops = down_ops or (
# ('Subsample',key_dim, num_heads, attn_ratio, mlp_ratio, stride)
('Subsample', key_dim[0], embed_dim[0] // key_dim[0], 4, 2, 2),
('Subsample', key_dim[0], embed_dim[1] // key_dim[1], 4, 2, 2),
('',)
)
self.distillation = distillation
self.use_conv = use_conv
ln_layer = ConvNorm if self.use_conv else LinearNorm
self.patch_embed = hybrid_backbone or stem_b16(in_chans, embed_dim[0], activation=act_layer)
self.blocks = []
resolution = img_size // patch_size
for i, (ed, kd, dpth, nh, ar, mr, do) in enumerate(
zip(embed_dim, key_dim, depth, num_heads, attn_ratio, mlp_ratio, down_ops)):
for _ in range(dpth):
self.blocks.append(
Residual(
Attention(
ed, kd, nh, attn_ratio=ar, act_layer=attn_act_layer,
resolution=resolution, use_conv=use_conv),
drop_path))
if mr > 0:
h = int(ed * mr)
self.blocks.append(
Residual(nn.Sequential(
ln_layer(ed, h, resolution=resolution),
act_layer(),
ln_layer(h, ed, bn_weight_init=0, resolution=resolution),
), drop_path))
if do[0] == 'Subsample':
# ('Subsample',key_dim, num_heads, attn_ratio, mlp_ratio, stride)
resolution_ = (resolution - 1) // do[5] + 1
self.blocks.append(
AttentionSubsample(
*embed_dim[i:i + 2], key_dim=do[1], num_heads=do[2],
attn_ratio=do[3], act_layer=attn_act_layer, stride=do[5],
resolution=resolution, resolution_=resolution_, use_conv=use_conv))
resolution = resolution_
if do[4] > 0: # mlp_ratio
h = int(embed_dim[i + 1] * do[4])
self.blocks.append(
Residual(nn.Sequential(
ln_layer(embed_dim[i + 1], h, resolution=resolution),
act_layer(),
ln_layer(h, embed_dim[i + 1], bn_weight_init=0, resolution=resolution),
), drop_path))
self.blocks = nn.Sequential(*self.blocks)
# Classifier head
self.head = NormLinear(embed_dim[-1], num_classes) if num_classes > 0 else nn.Identity()
if distillation:
self.head_dist = NormLinear(embed_dim[-1], num_classes) if num_classes > 0 else nn.Identity()
else:
self.head_dist = None
@torch.jit.ignore
def no_weight_decay(self):
return {x for x in self.state_dict().keys() if 'attention_biases' in x}
def forward(self, x):
x = self.patch_embed(x)
if not self.use_conv:
x = x.flatten(2).transpose(1, 2)
x = self.blocks(x)
x = x.mean((-2, -1)) if self.use_conv else x.mean(1)
if self.head_dist is not None:
x, x_dist = self.head(x), self.head_dist(x)
if self.training and not torch.jit.is_scripting():
return x, x_dist
else:
# during inference, return the average of both classifier predictions
return (x + x_dist) / 2
else:
x = self.head(x)
return x
def checkpoint_filter_fn(state_dict, model):
if 'model' in state_dict:
# For deit models
state_dict = state_dict['model']
D = model.state_dict()
for k in state_dict.keys():
if D[k].ndim == 4 and state_dict[k].ndim == 2:
state_dict[k] = state_dict[k][:, :, None, None]
return state_dict
def create_levit(variant, pretrained=False, default_cfg=None, fuse=False, **kwargs):
if kwargs.get('features_only', None):
raise RuntimeError('features_only not implemented for Vision Transformer models.')
model_cfg = dict(**model_cfgs[variant], **kwargs)
model = build_model_with_cfg(
Levit, variant, pretrained,
default_cfg=default_cfgs[variant],
pretrained_filter_fn=checkpoint_filter_fn,
**model_cfg)
#if fuse:
# utils.replace_batchnorm(model)
return model
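# A minimal smoke-test sketch (illustrative only; assumes the surrounding timm package and torch
# are importable): build one of the registered variants and run a forward pass at the default
# 224x224 input size. In eval mode the distillation head's output is averaged with the main head,
# so a single (1, 1000) logits tensor comes back.
#
#     model = levit_128s(pretrained=False)
#     model.eval()
#     with torch.no_grad():
#         out = model(torch.randn(1, 3, 224, 224))
#     print(out.shape)  # torch.Size([1, 1000])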
| 38.080844
| 141
| 0.596733
|
a288e9391cea8415c663eb7b232f50e07b13c875
| 528
|
py
|
Python
|
backend/home/migrations/0001_load_initial_data.py
|
crowdbotics-apps/apino-32697
|
1195a53621e1595b0a1bf1d18db283143ca4ce8f
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/home/migrations/0001_load_initial_data.py
|
crowdbotics-apps/apino-32697
|
1195a53621e1595b0a1bf1d18db283143ca4ce8f
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/home/migrations/0001_load_initial_data.py
|
crowdbotics-apps/apino-32697
|
1195a53621e1595b0a1bf1d18db283143ca4ce8f
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
from django.db import migrations
def create_site(apps, schema_editor):
Site = apps.get_model("sites", "Site")
custom_domain = "apino-32697.botics.co"
site_params = {
"name": "Apino",
}
if custom_domain:
site_params["domain"] = custom_domain
Site.objects.update_or_create(defaults=site_params, id=1)
class Migration(migrations.Migration):
dependencies = [
("sites", "0002_alter_domain_unique"),
]
operations = [
migrations.RunPython(create_site),
]
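# A reversibility sketch (illustrative; `remove_site` is a hypothetical helper, not part of the
# generated migration): migrations.RunPython also accepts a reverse callable, so the site row
# could be cleaned up on rollback.
#
#     def remove_site(apps, schema_editor):
#         apps.get_model("sites", "Site").objects.filter(id=1).delete()
#
#     operations = [migrations.RunPython(create_site, remove_site)]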
| 20.307692
| 61
| 0.651515
|
a6d5569539801f6041949357af688bf5040e847f
| 2,672
|
py
|
Python
|
samples/cli/accelbyte_py_sdk_cli/iam/_admin_get_banned_users_v3.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
samples/cli/accelbyte_py_sdk_cli/iam/_admin_get_banned_users_v3.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | 1
|
2021-10-13T03:46:58.000Z
|
2021-10-13T03:46:58.000Z
|
samples/cli/accelbyte_py_sdk_cli/iam/_admin_get_banned_users_v3.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
#
# Code generated. DO NOT EDIT!
# template_file: python-cli-command.j2
# justice-iam-service (5.10.1)
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
import json
import yaml
from typing import Optional
import click
from .._utils import login_as as login_as_internal
from .._utils import to_dict
from accelbyte_py_sdk.api.iam import admin_get_banned_users_v3 as admin_get_banned_users_v3_internal
from accelbyte_py_sdk.api.iam.models import ModelGetUserBanV3Response
@click.command()
@click.option("--active_only", "active_only", type=bool)
@click.option("--ban_type", "ban_type", type=str)
@click.option("--limit", "limit", type=int)
@click.option("--offset", "offset", type=int)
@click.option("--namespace", type=str)
@click.option("--login_as", type=click.Choice(["client", "user"], case_sensitive=False))
@click.option("--login_with_auth", type=str)
@click.option("--doc", type=bool)
def admin_get_banned_users_v3(
active_only: Optional[bool] = None,
ban_type: Optional[str] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
namespace: Optional[str] = None,
login_as: Optional[str] = None,
login_with_auth: Optional[str] = None,
doc: Optional[bool] = None,
):
if doc:
click.echo(admin_get_banned_users_v3_internal.__doc__)
return
x_additional_headers = None
if login_with_auth:
x_additional_headers = {
"Authorization": login_with_auth
}
else:
login_as_internal(login_as)
result, error = admin_get_banned_users_v3_internal(
active_only=active_only,
ban_type=ban_type,
limit=limit,
offset=offset,
namespace=namespace,
x_additional_headers=x_additional_headers,
)
if error:
raise Exception(f"AdminGetBannedUsersV3 failed: {str(error)}")
click.echo(yaml.safe_dump(to_dict(result), sort_keys=False))
admin_get_banned_users_v3.operation_id = "AdminGetBannedUsersV3"
admin_get_banned_users_v3.is_deprecated = False
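# A minimal invocation sketch (illustrative only; assumes the SDK is installed and credentials are
# configured for the chosen login mode): the command can be exercised in-process with click's
# test runner instead of a shell.
#
#     from click.testing import CliRunner
#     result = CliRunner().invoke(admin_get_banned_users_v3, ["--limit", "10", "--login_as", "client"])
#     print(result.output)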
| 32.987654
| 100
| 0.726048
|