**Dataset schema.** Each record below carries the 29 columns listed here; ranges are the min–max reported by the dataset viewer, and ⌀ marks columns that contain null values.

| column | dtype / range |
|---|---|
| hexsha | stringlengths 40–40 |
| size | int64 3–1.03M |
| ext | stringclasses 10 values |
| lang | stringclasses 1 value |
| max_stars_repo_path | stringlengths 3–972 |
| max_stars_repo_name | stringlengths 6–130 |
| max_stars_repo_head_hexsha | stringlengths 40–78 |
| max_stars_repo_licenses | listlengths 1–10 |
| max_stars_count | int64 1–191k ⌀ |
| max_stars_repo_stars_event_min_datetime | stringlengths 24–24 ⌀ |
| max_stars_repo_stars_event_max_datetime | stringlengths 24–24 ⌀ |
| max_issues_repo_path | stringlengths 3–972 |
| max_issues_repo_name | stringlengths 6–130 |
| max_issues_repo_head_hexsha | stringlengths 40–78 |
| max_issues_repo_licenses | listlengths 1–10 |
| max_issues_count | int64 1–116k ⌀ |
| max_issues_repo_issues_event_min_datetime | stringlengths 24–24 ⌀ |
| max_issues_repo_issues_event_max_datetime | stringlengths 24–24 ⌀ |
| max_forks_repo_path | stringlengths 3–972 |
| max_forks_repo_name | stringlengths 6–130 |
| max_forks_repo_head_hexsha | stringlengths 40–78 |
| max_forks_repo_licenses | listlengths 1–10 |
| max_forks_count | int64 1–105k ⌀ |
| max_forks_repo_forks_event_min_datetime | stringlengths 24–24 ⌀ |
| max_forks_repo_forks_event_max_datetime | stringlengths 24–24 ⌀ |
| content | stringlengths 3–1.03M |
| avg_line_length | float64 1.13–941k |
| max_line_length | int64 2–941k |
| alphanum_fraction | float64 0–1 |
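A minimal sketch of how rows with this schema might be loaded programmatically. The dataset name and the streaming usage of the Hugging Face `datasets` library are assumptions for illustration; nothing in this dump names the dataset.

```python
# Hypothetical loader: streams records whose fields match the schema above.
from datasets import load_dataset  # Hugging Face `datasets` library

ds = load_dataset("bigcode/the-stack", split="train", streaming=True)  # placeholder dataset name
row = next(iter(ds))
print(row["max_stars_repo_path"], row["size"], row["alphanum_fraction"])
```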
---

**Record 1**

- **path:** `cse481wi18/applications/scripts/temp.py`
- **repo:** `TimAdamson21/access_teleop` @ `4ca4cc3ebc29cb4942cec5c8e3e60b897b80590c`
- **hexsha:** `c2e7bb5c6b24c4cca3af7a71c9170adb1f1080ac` · **size:** 1,401 · **ext:** py · **lang:** Python
- **licenses:** MIT
- **stars / issues / forks:** null / null / null

```python
#! /usr/bin/env python
import fetch_api
import rospy
import rosbag
import numpy as np

# Minimum joint-space distance between saved poses. The original referenced an
# undefined (and misspelled) ARM_TRAJ_TRESHOLD; this value is an assumed placeholder.
ARM_TRAJ_THRESHOLD = 0.1


def wait_for_time():
    """Wait for simulated time to begin."""
    while rospy.Time().now().to_sec() == 0:
        pass


def main():
    rospy.init_node('arm_demo')
    wait_for_time()
    pose1 = [1.0, 1.25, 2.0, -2.25, 0, 1.0, 0.0]
    pose2 = [1.0, 1.25, 2.0, -2.25, 1, 1.0, 0.0]
    arm = fetch_api.Arm()
    # arm.move_to_joints(fetch_api.ArmJoints.from_list(pose1))
    # poses = [fetch_api.ArmJoints.from_list(pose1), fetch_api.ArmJoints.from_list(pose2)]
    # for pose in poses:
    #     arm.move_to_joints(pose)
    bag = rosbag.Bag('/home/maru/catkin_ws/src/limb_manipulation/bags/temp.bag')
    poses = []
    prev_msg = []
    # Extract the trajectory from the bag file, keeping only joint states that
    # differ enough from the previously saved one.
    for topic, msg, t in bag.read_messages(topics=['/joint_states']):
        joint_state = list(msg.position[6:13])
        if len(joint_state) != 0 and (len(prev_msg) == 0 or np.abs(np.sum(np.subtract(joint_state, prev_msg))) > ARM_TRAJ_THRESHOLD):
            prev_msg = joint_state
            pose = fetch_api.ArmJoints.from_list(prev_msg)
            poses.append(pose)
    bag.close()
    # Follow the trajectory
    if len(poses) == 0:
        # The original logged an undefined variable `abbr` here and fell through.
        rospy.logerr("Empty trajectory in bag file")
        return
    for pose in poses:
        arm.move_to_joints(pose)


if __name__ == '__main__':
    main()
```

- **stats:** avg_line_length 29.1875 · max_line_length 132 · alphanum_fraction 0.626695

---

**Record 2**

- **path:** `DPGAnalysis/SiStripTools/python/overlapproblemtsosanalyzer_cfi.py`
- **repo:** `ckamtsikis/cmssw` @ `ea19fe642bb7537cbf58451dcf73aa5fd1b66250`
- **hexsha:** `d6390339522e3fc8abaaa6006454fe4bad2df1e6` · **size:** 2,177 · **ext:** py · **lang:** Python
- **licenses:** Apache-2.0
- **stars:** 852 (2015-01-11T21:03:51.000Z → 2022-03-25T21:14:00.000Z)
- **issues:** 30,371 (2015-01-02T00:14:40.000Z → 2022-03-31T23:26:05.000Z)
- **forks:** 3,240 (2015-01-02T05:53:18.000Z → 2022-03-31T17:24:21.000Z)

```python
import FWCore.ParameterSet.Config as cms
overlapproblemtsosanalyzer = cms.EDAnalyzer("OverlapProblemTSOSAnalyzer",
trajTrackAssoCollection = cms.InputTag("refittedTracks"),
onlyValidRecHit = cms.bool(True),
tsosHMConf = cms.PSet(
wantedSubDets = cms.VPSet(cms.PSet(detLabel=cms.string("TECR1"),title=cms.string("TEC R1"),selection=cms.untracked.vstring("0x1e0000e0-0x1c000020")),
cms.PSet(detLabel=cms.string("TECR2"),title=cms.string("TEC R2"),selection=cms.untracked.vstring("0x1e0000e0-0x1c000040")),
cms.PSet(detLabel=cms.string("TECR3"),title=cms.string("TEC R3"),selection=cms.untracked.vstring("0x1e0000e0-0x1c000060")),
cms.PSet(detLabel=cms.string("TECR4"),title=cms.string("TEC R4"),selection=cms.untracked.vstring("0x1e0000e0-0x1c000080")),
cms.PSet(detLabel=cms.string("TECR5"),title=cms.string("TEC R5"),selection=cms.untracked.vstring("0x1e0000e0-0x1c0000a0")),
cms.PSet(detLabel=cms.string("TECR6"),title=cms.string("TEC R6"),selection=cms.untracked.vstring("0x1e0000e0-0x1c0000c0")),
cms.PSet(detLabel=cms.string("TECR7"),title=cms.string("TEC R7"),selection=cms.untracked.vstring("0x1e0000e0-0x1c0000e0")),
cms.PSet(detLabel=cms.string("FPIXpP1"),title=cms.string("FPIX+ panel 1"),selection=cms.untracked.vstring("0x1f800300-0x15000100")),
cms.PSet(detLabel=cms.string("FPIXpP2"),title=cms.string("FPIX+ panel 2"),selection=cms.untracked.vstring("0x1f800300-0x15000200")),
cms.PSet(detLabel=cms.string("FPIXmP1"),title=cms.string("FPIX- panel 1"),selection=cms.untracked.vstring("0x1f800300-0x14800100")),
cms.PSet(detLabel=cms.string("FPIXmP2"),title=cms.string("FPIX- panel 2"),selection=cms.untracked.vstring("0x1f800300-0x14800200"))
)
)
)
```

- **stats:** avg_line_length 98.954545 · max_line_length 162 · alphanum_fraction 0.595315

---

**Record 3**

- **path:** `notebooks/Gentle-Intro-To-HARK.py`
- **repo:** `JackShiqiLi/DemARK` @ `2cd3de40574e0eb9a9da795307b2b5c97d6e22bd`
- **hexsha:** `0a631180a06f4010b39762de3a4aefcc3f5cf4e1` · **size:** 20,472 · **ext:** py · **lang:** Python
- **licenses:** Apache-2.0
- **stars / issues / forks:** null / null / null

```python
# ---
# jupyter:
# jupytext:
# cell_metadata_filter: collapsed,code_folding
# formats: ipynb,py:percent
# text_representation:
# extension: .py
# format_name: percent
# format_version: '1.2'
# jupytext_version: 1.2.1
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# %% [markdown]
# # A Gentle Introduction to HARK
#
# This notebook provides a simple, hands-on tutorial for first time HARK users -- and potentially first time Python users. It does not go "into the weeds" - we have hidden some code cells that do boring things that you don't need to digest on your first experience with HARK. Our aim is to convey a feel for how the toolkit works.
#
# For readers for whom this is their very first experience with Python, we have put important Python concepts in **boldface**. For those using a Jupyter notebook for the first time, we have put Jupyter instructions in _italics_. Only cursory definitions (if any) are provided here. If you want to learn more, there are many online Python and Jupyter tutorials.
# %% {"code_folding": []}
# This cell has a bit of initial setup. You can click the triangle to the left to expand it.
# Click the "Run" button immediately above the notebook in order to execute the contents of any cell
# WARNING: Each cell in the notebook relies upon results generated by previous cells
# The most common problem beginners have is to execute a cell before all its predecessors
# If you do this, you can restart the kernel (see the "Kernel" menu above) and start over
# %matplotlib inline
import matplotlib.pyplot as plt
# The first step is to be able to bring things in from different directories
import sys
import os
sys.path.insert(0, os.path.abspath('../lib'))
from util import log_progress
import numpy as np
import HARK
from time import clock
from copy import deepcopy
mystr = lambda number : "{:.4f}".format(number)
from HARK.utilities import plotFuncs
# %% [markdown]
# ## Your First HARK Model: Perfect Foresight
#
# $$\newcommand{\CRRA}{\rho}\newcommand{\DiscFac}{\beta}$$
# We start with almost the simplest possible consumption model: A consumer with CRRA utility
#
# \begin{equation}
# U(C) = \frac{C^{1-\CRRA}}{1-\CRRA}
# \end{equation}
#
# has perfect foresight about everything except the (stochastic) date of death, which occurs with constant probability implying a "survival probability" $\newcommand{\LivPrb}{\aleph}\LivPrb < 1$. Permanent labor income $P_t$ grows from period to period by a factor $\Gamma_t$. At the beginning of each period $t$, the consumer has some amount of market resources $M_t$ (which includes both market wealth and current income) and must choose how much of those resources to consume $C_t$ and how much to retain in a riskless asset $A_t$ which will earn return factor $R$. The agent's flow of utility $U(C_t)$ from consumption is geometrically discounted by factor $\beta$. Between periods, the agent dies with probability $\mathsf{D}_t$, ending his problem.
#
# The agent's problem can be written in Bellman form as:
#
# \begin{eqnarray*}
# V_t(M_t,P_t) &=& \max_{C_t}~U(C_t) + \beta \aleph V_{t+1}(M_{t+1},P_{t+1}), \\
# & s.t. & \\
# %A_t &=& M_t - C_t, \\
# M_{t+1} &=& R (M_{t}-C_{t}) + Y_{t+1}, \\
# P_{t+1} &=& \Gamma_{t+1} P_t, \\
# \end{eqnarray*}
#
# A particular perfect foresight agent's problem can be characterized by values of risk aversion $\rho$, discount factor $\beta$, and return factor $R$, along with sequences of income growth factors $\{ \Gamma_t \}$ and survival probabilities $\{\mathsf{\aleph}_t\}$. To keep things simple, let's forget about "sequences" of income growth and mortality, and just think about an $\textit{infinite horizon}$ consumer with constant income growth and survival probability.
#
# ## Representing Agents in HARK
#
# HARK represents agents solving this type of problem as $\textbf{instances}$ of the $\textbf{class}$ $\texttt{PerfForesightConsumerType}$, a $\textbf{subclass}$ of $\texttt{AgentType}$. To make agents of this class, we must import the class itself into our workspace. (Run the cell below in order to do this).
# %%
from HARK.ConsumptionSaving.ConsIndShockModel import PerfForesightConsumerType
# %% [markdown]
# The $\texttt{PerfForesightConsumerType}$ class contains within itself the python code that constructs the solution for the perfect foresight model we are studying here, as specifically articulated in [these lecture notes](http://econ.jhu.edu/people/ccarroll/public/lecturenotes/consumption/PerfForesightCRRA/).
#
# To create an instance of $\texttt{PerfForesightConsumerType}$, we simply call the class as if it were a function, passing as arguments the specific parameter values we want it to have. In the hidden cell below, we define a $\textbf{dictionary}$ named $\texttt{PF_dictionary}$ with these parameter values:
#
# | Param | Description | Code | Value |
# | :---: | --- | --- | :---: |
# | $\rho$ | Relative risk aversion | $\texttt{CRRA}$ | 2.5 |
# | $\beta$ | Discount factor | $\texttt{DiscFac}$ | 0.96 |
# | $R$ | Risk free interest factor | $\texttt{Rfree}$ | 1.03 |
# | $\newcommand{\LivFac}{\aleph}\LivFac$ | Survival probability | $\texttt{LivPrb}$ | 0.98 |
# | $\Gamma$ | Income growth factor | $\texttt{PermGroFac}$ | 1.01 |
#
#
# For now, don't worry about the specifics of dictionaries. All you need to know is that a dictionary lets us pass many arguments wrapped up in one simple data structure.
# %% {"code_folding": []}
# This cell defines a parameter dictionary. You can expand it if you want to see what that looks like.
PF_dictionary = {
'CRRA' : 2.5,
'DiscFac' : 0.96,
'Rfree' : 1.03,
'LivPrb' : [0.98],
'PermGroFac' : [1.01],
'T_cycle' : 1,
'cycles' : 0,
'AgentCount' : 10000
}
# To those curious enough to open this hidden cell, you might notice that we defined
# a few extra parameters in that dictionary: T_cycle, cycles, and AgentCount. Don't
# worry about these for now.
# %% [markdown]
# Let's make an **object** named $\texttt{PFexample}$ which is an **instance** of the $\texttt{PerfForesightConsumerType}$ class. The object $\texttt{PFexample}$ will bundle together the abstract mathematical description of the solution embodied in $\texttt{PerfForesightConsumerType}$, and the specific set of parameter values defined in $\texttt{PF_dictionary}$. Such a bundle is created passing $\texttt{PF_dictionary}$ to the class $\texttt{PerfForesightConsumerType}$:
# %%
PFexample = PerfForesightConsumerType(**PF_dictionary)
# the asterisks ** basically say "here come some arguments" to PerfForesightConsumerType
# %% [markdown]
# In $\texttt{PFexample}$, we now have _defined_ the problem of a particular infinite horizon perfect foresight consumer who knows how to solve this problem.
#
# ## Solving an Agent's Problem
#
# To tell the agent to actually solve the problem, we call the agent's $\texttt{solve}$ **method**. (A **method** is essentially a function that an object runs that affects the object's own internal characteristics -- in this case, the method adds the consumption function to the contents of $\texttt{PFexample}$.)
#
# The cell below calls the $\texttt{solve}$ method for $\texttt{PFexample}$
# %%
PFexample.solve()
# %% [markdown]
# Running the $\texttt{solve}$ method creates the **attribute** of $\texttt{PFexample}$ named $\texttt{solution}$. In fact, every subclass of $\texttt{AgentType}$ works the same way: The class definition contains the abstract algorithm that knows how to solve the model, but to obtain the particular solution for a specific instance (parameterization/configuration), that instance must be instructed to $\texttt{solve()}$ its problem.
#
# The $\texttt{solution}$ attribute is always a $\textit{list}$ of solutions to a single period of the problem. In the case of an infinite horizon model like the one here, there is just one element in that list -- the solution to all periods of the infinite horizon problem. The consumption function stored as the first element (element 0) of the solution list can be retrieved by:
# %%
PFexample.solution[0].cFunc
# %% [markdown]
# One of the results proven in the associated [the lecture notes](http://econ.jhu.edu/people/ccarroll/public/lecturenotes/consumption/PerfForesightCRRA/) is that, for the specific problem defined above, there is a solution in which the _ratio_ $c = C/P$ is a linear function of the _ratio_ of market resources to permanent income, $m = M/P$.
#
# This is why $\texttt{cFunc}$ can be represented by a linear interpolation. It can be plotted between an $m$ ratio of 0 and 10 using the command below.
# %%
mPlotTop=10
plotFuncs(PFexample.solution[0].cFunc,0.,mPlotTop)
# %% [markdown]
# The figure illustrates one of the surprising features of the perfect foresight model: A person with zero money should be spending at a rate more than double their income (that is, $\texttt{cFunc}(0.) \approx 2.08$ - the intercept on the vertical axis). How can this be?
#
# The answer is that we have not incorporated any constraint that would prevent the agent from borrowing against the entire PDV of future earnings-- human wealth. How much is that? What's the minimum value of $m_t$ where the consumption function is defined? We can check by retrieving the $\texttt{hNrm}$ **attribute** of the solution, which calculates the value of human wealth normalized by permanent income:
# %%
humanWealth = PFexample.solution[0].hNrm
mMinimum = PFexample.solution[0].mNrmMin
print("This agent's human wealth is " + str(humanWealth) + ' times his current income level.')
print("This agent's consumption function is defined (consumption is positive) down to m_t = " + str(mMinimum))
# %% [markdown]
# Yikes! Let's take a look at the bottom of the consumption function. In the cell below, set the bounds of the $\texttt{plotFuncs}$ function to display down to the lowest defined value of the consumption function.
# %%
# YOUR FIRST HANDS-ON EXERCISE!
# Fill in the value for "mPlotBottom" to plot the consumption function from the point where it is zero.
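# A possible answer (a sketch, not the notebook's official solution): the
# function is defined down to mNrmMin, retrieved above as mMinimum, so e.g.
#   mPlotBottom = mMinimum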
plotFuncs(PFexample.solution[0].cFunc,mPlotBottom,mPlotTop)
# %% [markdown]
# ## Changing Agent Parameters
#
# Suppose you wanted to change one (or more) of the parameters of the agent's problem and see what that does. We want to compare consumption functions before and after we change parameters, so let's make a new instance of $\texttt{PerfForesightConsumerType}$ by copying $\texttt{PFexample}$.
# %%
NewExample = deepcopy(PFexample)
# %% [markdown]
# In Python, you can set an **attribute** of an object just like any other variable. For example, we could make the new agent less patient:
# %%
NewExample.DiscFac = 0.90
NewExample.solve()
mPlotBottom = mMinimum
plotFuncs([PFexample.solution[0].cFunc,NewExample.solution[0].cFunc],mPlotBottom,mPlotTop)
# %% [markdown]
# (Note that you can pass a **list** of functions to $\texttt{plotFuncs}$ as the first argument rather than just a single function. Lists are written inside of [square brackets].)
#
# Let's try to deal with the "problem" of massive human wealth by making another consumer who has essentially no future income. We can virtually eliminate human wealth by making the permanent income growth factor $\textit{very}$ small.
#
# In $\texttt{PFexample}$, the agent's income grew by 1 percent per period -- his $\texttt{PermGroFac}$ took the value 1.01. What if our new agent had a growth factor of 0.01 -- his income __shrinks__ by 99 percent each period? In the cell below, set $\texttt{NewExample}$'s discount factor back to its original value, then set its $\texttt{PermGroFac}$ attribute so that the growth factor is 0.01 each period.
#
# Important: Recall that the model at the top of this document said that an agent's problem is characterized by a sequence of income growth factors, but we tabled that concept. Because $\texttt{PerfForesightConsumerType}$ treats $\texttt{PermGroFac}$ as a __time-varying__ attribute, it must be specified as a **list** (with a single element in this case).
# %%
# Revert NewExample's discount factor and make his future income minuscule
# print("your lines here")
# Compare the old and new consumption functions
plotFuncs([PFexample.solution[0].cFunc,NewExample.solution[0].cFunc],0.,10.)
# %% [markdown]
# Now $\texttt{NewExample}$'s consumption function has the same slope (MPC) as $\texttt{PFexample}$, but it emanates from (almost) zero-- he has basically no future income to borrow against!
#
# If you'd like, use the cell above to alter $\texttt{NewExample}$'s other attributes (relative risk aversion, etc.) and see how the consumption function changes. However, keep in mind that _no solution exists_ for some combinations of parameters. HARK should let you know if this is the case if you try to solve such a model.
#
#
# ## Your Second HARK Model: Adding Income Shocks
#
# Linear consumption functions are pretty boring, and you'd be justified in feeling unimpressed if all HARK could do was plot some lines. Let's look at another model that adds two important layers of complexity: income shocks and (artificial) borrowing constraints.
#
# Specifically, our new type of consumer receives two income shocks at the beginning of each period: a completely transitory shock $\theta_t$ and a completely permanent shock $\psi_t$. Moreover, lenders will not let the agent borrow money such that his ratio of end-of-period assets $A_t$ to permanent income $P_t$ is less than $\underline{a}$. As with the perfect foresight problem, this model can be framed in terms of __normalized__ variables, e.g. $m_t \equiv M_t/P_t$. (See [here](http://econ.jhu.edu/people/ccarroll/papers/BufferStockTheory/) for all the theory).
#
# \begin{eqnarray*}
# v_t(m_t) &=& \max_{c_t} ~ U(c_t) ~ + \phantom{\LivFac} \beta \mathbb{E} [(\Gamma_{t+1}\psi_{t+1})^{1-\rho} v_{t+1}(m_{t+1}) ], \\
# a_t &=& m_t - c_t, \\
# a_t &\geq& \underline{a}, \\
# m_{t+1} &=& R/(\Gamma_{t+1} \psi_{t+1}) a_t + \theta_{t+1}, \\
# \mathbb{E}[\psi]=\mathbb{E}[\theta] &=& 1, \\
# u(c) &=& \frac{c^{1-\rho}}{1-\rho}.
# \end{eqnarray*}
#
# HARK represents agents with this kind of problem as instances of the class $\texttt{IndShockConsumerType}$. To create an $\texttt{IndShockConsumerType}$, we must specify the same set of parameters as for a $\texttt{PerfForesightConsumerType}$, as well as an artificial borrowing constraint $\underline{a}$ and a sequence of income shocks. It's easy enough to pick a borrowing constraint -- say, zero -- but how would we specify the distributions of the shocks? Can't the joint distribution of permanent and transitory shocks be just about anything?
#
# _Yes_, and HARK can handle whatever correlation structure a user might care to specify. However, the default behavior of $\texttt{IndShockConsumerType}$ is that the distribution of permanent income shocks is mean-one lognormal, and the distribution of transitory shocks is mean-one lognormal augmented with a point mass representing unemployment. The distributions are independent of each other by default, and by default are approximated with $N$-point equiprobable distributions.
#
# Let's make an infinite horizon instance of $\texttt{IndShockConsumerType}$ with the same parameters as our original perfect foresight agent, plus the extra parameters to specify the income shock distribution and the artificial borrowing constraint. As before, we'll make a dictionary:
#
#
# | Param | Description | Code | Value |
# | :---: | --- | --- | :---: |
# | $\underline{a}$ | Artificial borrowing constraint | $\texttt{BoroCnstArt}$ | 0.0 |
# | $\sigma_\psi$ | Underlying stdev of permanent income shocks | $\texttt{PermShkStd}$ | 0.1 |
# | $\sigma_\theta$ | Underlying stdev of transitory income shocks | $\texttt{TranShkStd}$ | 0.1 |
# | $N_\psi$ | Number of discrete permanent income shocks | $\texttt{PermShkCount}$ | 7 |
# | $N_\theta$ | Number of discrete transitory income shocks | $\texttt{TranShkCount}$ | 7 |
# | $\mho$ | Unemployment probability | $\texttt{UnempPrb}$ | 0.05 |
# | $\underline{\theta}$ | Transitory shock when unemployed | $\texttt{IncUnemp}$ | 0.3 |
# %% {"code_folding": []}
# This cell defines a parameter dictionary for making an instance of IndShockConsumerType.
IndShockDictionary = {
'CRRA': 2.5, # The dictionary includes our original parameters...
'Rfree': 1.03,
'DiscFac': 0.96,
'LivPrb': [0.98],
'PermGroFac': [1.01],
'PermShkStd': [0.1], # ... and the new parameters for constructing the income process.
'PermShkCount': 7,
'TranShkStd': [0.1],
'TranShkCount': 7,
'UnempPrb': 0.05,
'IncUnemp': 0.3,
'BoroCnstArt': 0.0,
'aXtraMin': 0.001, # aXtra parameters specify how to construct the grid of assets.
'aXtraMax': 50., # Don't worry about these for now
'aXtraNestFac': 3,
'aXtraCount': 48,
'aXtraExtra': [None],
'vFuncBool': False, # These booleans indicate whether the value function should be calculated
'CubicBool': False, # and whether to use cubic spline interpolation. You can ignore them.
'aNrmInitMean' : -10.,
'aNrmInitStd' : 0.0, # These parameters specify the (log) distribution of normalized assets
'pLvlInitMean' : 0.0, # and permanent income for agents at "birth". They are only relevant in
'pLvlInitStd' : 0.0, # simulation and you don't need to worry about them.
'PermGroFacAgg' : 1.0,
'T_retire': 0, # What's this about retirement? ConsIndShock is set up to be able to
'UnempPrbRet': 0.0, # handle lifecycle models as well as infinite horizon problems. Swapping
'IncUnempRet': 0.0, # out the structure of the income process is easy, but ignore for now.
'T_age' : None,
'T_cycle' : 1,
'cycles' : 0,
'AgentCount': 10000,
'tax_rate':0.0,
}
# Hey, there's a lot of parameters we didn't tell you about! Yes, but you don't need to
# think about them for now.
# %% [markdown]
# As before, we need to import the relevant subclass of $\texttt{AgentType}$ into our workspace, then create an instance by passing the dictionary to the class as if the class were a function.
# %%
from HARK.ConsumptionSaving.ConsIndShockModel import IndShockConsumerType
IndShockExample = IndShockConsumerType(**IndShockDictionary)
# %% [markdown]
# Now we can solve our new agent's problem just like before, using the $\texttt{solve}$ method.
# %%
IndShockExample.solve()
plotFuncs(IndShockExample.solution[0].cFunc,0.,10.)
# %% [markdown]
# ## Changing Constructed Attributes
#
# In the parameter dictionary above, we chose values for HARK to use when constructing its numeric representation of $F_t$, the joint distribution of permanent and transitory income shocks. When $\texttt{IndShockExample}$ was created, those parameters ($\texttt{TranShkStd}$, etc) were used by the **constructor** or **initialization** method of $\texttt{IndShockConsumerType}$ to construct an attribute called $\texttt{IncomeDstn}$.
#
# Suppose you were interested in changing (say) the amount of permanent income risk. From the section above, you might think that you could simply change the attribute $\texttt{PermShkStd}$, solve the model again, and it would work.
#
# That's _almost_ true-- there's one extra step. $\texttt{PermShkStd}$ is a primitive input, but it's not the thing you _actually_ want to change. Changing $\texttt{PermShkStd}$ doesn't actually update the income distribution... unless you tell it to (just like changing an agent's preferences does not change the consumption function that was stored for the old set of parameters -- until you invoke the $\texttt{solve}$ method again). In the cell below, we invoke the method $\texttt{updateIncomeProcess}$ so HARK knows to reconstruct the attribute $\texttt{IncomeDstn}$.
# %%
OtherExample = deepcopy(IndShockExample) # Make a copy so we can compare consumption functions
OtherExample.PermShkStd = [0.2] # Double permanent income risk (note that it's a one element list)
OtherExample.updateIncomeProcess() # Call the method to reconstruct the representation of F_t
OtherExample.solve()
# %% [markdown]
# In the cell below, use your blossoming HARK skills to plot the consumption function for $\texttt{IndShockExample}$ and $\texttt{OtherExample}$ on the same figure.
# %%
# Use the line(s) below to plot the consumption functions against each other
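# One possible answer (a sketch):
#   plotFuncs([IndShockExample.solution[0].cFunc, OtherExample.solution[0].cFunc], 0., 10.)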
```

- **stats:** avg_line_length 65.197452 · max_line_length 757 · alphanum_fraction 0.730852

---

**Record 4**

- **path:** `setup.py`
- **repo:** `djw8605/scitokens` @ `06f27c54f7e2d7456c0f26066376fee90567fbe6`
- **hexsha:** `114488f5d72253f9d0ed36e8d866accf7bc63474` · **size:** 734 · **ext:** py · **lang:** Python
- **licenses:** Apache-2.0
- **stars / issues / forks:** null / null / null

```python
"""
Install file for SciTokens project.
"""
import setuptools
setuptools.setup(name="scitokens",
version="1.2.0",
description="SciToken reference implementation library",
author_email="team@scitokens.org",
author="Brian Bockelman",
url="https://scitokens.org",
package_dir={"": "src"},
packages=["scitokens", "scitokens.utils"],
scripts=['tools/scitokens-admin-create-token',
'tools/scitokens-admin-create-key'],
install_requires=['cryptography',
'PyJWT',
'six'],
)
```

- **stats:** avg_line_length 33.363636 · max_line_length 73 · alphanum_fraction 0.472752

---

**Record 5**

- **path:** `python/src/iceberg/expressions/literals.py`
- **repo:** `wgzhxy/iceberg` @ `547152cf3dfa5e399351b851c48edd8f916e9c48`
- **hexsha:** `c89894d8ad0073cc3e99367b65fd183a9a84e8da` · **size:** 11,751 · **ext:** py · **lang:** Python
- **licenses:** Apache-2.0
- **stars / issues / forks:** null / null / null

```python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=W0613
import struct
from decimal import ROUND_HALF_UP, Decimal
from functools import singledispatch, singledispatchmethod
from typing import Optional, Union
from uuid import UUID
from iceberg.expressions.base import Literal
from iceberg.types import (
BinaryType,
BooleanType,
DateType,
DecimalType,
DoubleType,
FixedType,
FloatType,
IntegerType,
LongType,
StringType,
TimestampType,
TimestamptzType,
TimeType,
UUIDType,
)
from iceberg.utils.datetime import (
date_to_days,
micros_to_days,
time_to_micros,
timestamp_to_micros,
timestamptz_to_micros,
)
from iceberg.utils.singleton import Singleton
@singledispatch
def literal(value) -> Literal:
"""
A generic Literal factory to construct an iceberg Literal based on python primitive data type
using dynamic overloading
Args:
value(python primitive type): the value to be associated with literal
Example:
from iceberg.expressions.literals import literal
>>> literal(123)
LongLiteral(123)
"""
raise TypeError(f"Invalid literal value: {repr(value)}")
@literal.register(bool)
def _(value: bool) -> Literal[bool]:
return BooleanLiteral(value)
@literal.register(int)
def _(value: int) -> Literal[int]:
return LongLiteral(value)
@literal.register(float)
def _(value: float) -> Literal[float]:
# expression binding can convert to FloatLiteral if needed
return DoubleLiteral(value)
@literal.register(str)
def _(value: str) -> Literal[str]:
return StringLiteral(value)
@literal.register(UUID)
def _(value: UUID) -> Literal[UUID]:
return UUIDLiteral(value)
@literal.register(bytes)
def _(value: bytes) -> Literal[bytes]:
# expression binding can convert to FixedLiteral if needed
return BinaryLiteral(value)
@literal.register(bytearray)
def _(value: bytearray) -> Literal[bytes]:
return BinaryLiteral(bytes(value))
@literal.register(Decimal)
def _(value: Decimal) -> Literal[Decimal]:
return DecimalLiteral(value)
class AboveMax(metaclass=Singleton):
@property
def value(self):
raise ValueError("AboveMax has no value")
def to(self, type_var):
raise TypeError("Cannot change the type of AboveMax")
def __repr__(self):
return "AboveMax()"
def __str__(self):
return "AboveMax"
class BelowMin(metaclass=Singleton):
    # Mirrors AboveMax: `value` is a property so that attribute access raises,
    # rather than returning a bound method (the original omitted @property and
    # carried a redundant empty __init__).
    @property
    def value(self):
        raise ValueError("BelowMin has no value")
def to(self, type_var):
raise TypeError("Cannot change the type of BelowMin")
def __repr__(self):
return "BelowMin()"
def __str__(self):
return "BelowMin"
class BooleanLiteral(Literal[bool]):
def __init__(self, value: bool):
super().__init__(value, bool)
@singledispatchmethod
def to(self, type_var):
return None
@to.register(BooleanType)
def _(self, type_var):
return self
class LongLiteral(Literal[int]):
def __init__(self, value: int):
super().__init__(value, int)
@singledispatchmethod
def to(self, type_var):
return None
@to.register(LongType)
def _(self, type_var: LongType) -> Literal[int]:
return self
@to.register(IntegerType)
def _(self, type_var: IntegerType) -> Union[AboveMax, BelowMin, Literal[int]]:
if IntegerType.max < self.value:
return AboveMax()
elif IntegerType.min > self.value:
return BelowMin()
return self
@to.register(FloatType)
def _(self, type_var: FloatType) -> Literal[float]:
return FloatLiteral(float(self.value))
@to.register(DoubleType)
def _(self, type_var: DoubleType) -> Literal[float]:
return DoubleLiteral(float(self.value))
@to.register(DateType)
def _(self, type_var: DateType) -> Literal[int]:
return DateLiteral(self.value)
@to.register(TimeType)
def _(self, type_var: TimeType) -> Literal[int]:
return TimeLiteral(self.value)
@to.register(TimestampType)
def _(self, type_var: TimestampType) -> Literal[int]:
return TimestampLiteral(self.value)
@to.register(DecimalType)
def _(self, type_var: DecimalType) -> Literal[Decimal]:
unscaled = Decimal(self.value)
if type_var.scale == 0:
return DecimalLiteral(unscaled)
else:
sign, digits, _ = unscaled.as_tuple()
zeros = (0,) * type_var.scale
return DecimalLiteral(Decimal((sign, digits + zeros, -type_var.scale)))
class FloatLiteral(Literal[float]):
def __init__(self, value: float):
super().__init__(value, float)
self._value32 = struct.unpack("<f", struct.pack("<f", value))[0]
def __eq__(self, other):
return self._value32 == other
def __lt__(self, other):
return self._value32 < other
def __gt__(self, other):
return self._value32 > other
def __le__(self, other):
return self._value32 <= other
def __ge__(self, other):
return self._value32 >= other
@singledispatchmethod
def to(self, type_var):
return None
@to.register(FloatType)
def _(self, type_var: FloatType) -> Literal[float]:
return self
@to.register(DoubleType)
def _(self, type_var: DoubleType) -> Literal[float]:
return DoubleLiteral(self.value)
@to.register(DecimalType)
def _(self, type_var: DecimalType) -> Literal[Decimal]:
return DecimalLiteral(Decimal(self.value).quantize(Decimal((0, (1,), -type_var.scale)), rounding=ROUND_HALF_UP))
class DoubleLiteral(Literal[float]):
def __init__(self, value: float):
super().__init__(value, float)
@singledispatchmethod
def to(self, type_var):
return None
@to.register(DoubleType)
def _(self, type_var: DoubleType) -> Literal[float]:
return self
@to.register(FloatType)
def _(self, type_var: FloatType) -> Union[AboveMax, BelowMin, Literal[float]]:
if FloatType.max < self.value:
return AboveMax()
elif FloatType.min > self.value:
return BelowMin()
return FloatLiteral(self.value)
@to.register(DecimalType)
def _(self, type_var: DecimalType) -> Literal[Decimal]:
return DecimalLiteral(Decimal(self.value).quantize(Decimal((0, (1,), -type_var.scale)), rounding=ROUND_HALF_UP))
class DateLiteral(Literal[int]):
def __init__(self, value: int):
super().__init__(value, int)
@singledispatchmethod
def to(self, type_var):
return None
@to.register(DateType)
def _(self, type_var: DateType) -> Literal[int]:
return self
class TimeLiteral(Literal[int]):
def __init__(self, value: int):
super().__init__(value, int)
@singledispatchmethod
def to(self, type_var):
return None
@to.register(TimeType)
def _(self, type_var: TimeType) -> Literal[int]:
return self
class TimestampLiteral(Literal[int]):
def __init__(self, value: int):
super().__init__(value, int)
@singledispatchmethod
def to(self, type_var):
return None
@to.register(TimestampType)
def _(self, type_var: TimestampType) -> Literal[int]:
return self
@to.register(DateType)
def _(self, type_var: DateType) -> Literal[int]:
return DateLiteral(micros_to_days(self.value))
class DecimalLiteral(Literal[Decimal]):
def __init__(self, value: Decimal):
super().__init__(value, Decimal)
@singledispatchmethod
def to(self, type_var):
return None
@to.register(DecimalType)
def _(self, type_var: DecimalType) -> Optional[Literal[Decimal]]:
if type_var.scale == abs(self.value.as_tuple().exponent):
return self
return None
class StringLiteral(Literal[str]):
def __init__(self, value: str):
super().__init__(value, str)
@singledispatchmethod
def to(self, type_var):
return None
@to.register(StringType)
def _(self, type_var: StringType) -> Literal[str]:
return self
@to.register(DateType)
def _(self, type_var: DateType) -> Optional[Literal[int]]:
try:
return DateLiteral(date_to_days(self.value))
except (TypeError, ValueError):
return None
@to.register(TimeType)
def _(self, type_var: TimeType) -> Optional[Literal[int]]:
try:
return TimeLiteral(time_to_micros(self.value))
except (TypeError, ValueError):
return None
@to.register(TimestampType)
def _(self, type_var: TimestampType) -> Optional[Literal[int]]:
try:
return TimestampLiteral(timestamp_to_micros(self.value))
except (TypeError, ValueError):
return None
@to.register(TimestamptzType)
def _(self, type_var: TimestamptzType) -> Optional[Literal[int]]:
try:
return TimestampLiteral(timestamptz_to_micros(self.value))
except (TypeError, ValueError):
return None
@to.register(UUIDType)
def _(self, type_var: UUIDType) -> Literal[UUID]:
return UUIDLiteral(UUID(self.value))
@to.register(DecimalType)
def _(self, type_var: DecimalType) -> Optional[Literal[Decimal]]:
dec = Decimal(self.value)
if type_var.scale == abs(dec.as_tuple().exponent):
return DecimalLiteral(dec)
else:
return None
class UUIDLiteral(Literal[UUID]):
def __init__(self, value: UUID):
super().__init__(value, UUID)
@singledispatchmethod
def to(self, type_var):
return None
@to.register(UUIDType)
def _(self, type_var: UUIDType) -> Literal[UUID]:
return self
class FixedLiteral(Literal[bytes]):
def __init__(self, value: bytes):
super().__init__(value, bytes)
@singledispatchmethod
def to(self, type_var):
return None
@to.register(FixedType)
def _(self, type_var: FixedType) -> Optional[Literal[bytes]]:
if len(self.value) == type_var.length:
return self
else:
return None
@to.register(BinaryType)
def _(self, type_var: BinaryType) -> Literal[bytes]:
return BinaryLiteral(self.value)
class BinaryLiteral(Literal[bytes]):
def __init__(self, value: bytes):
super().__init__(value, bytes)
@singledispatchmethod
def to(self, type_var):
return None
@to.register(BinaryType)
def _(self, type_var: BinaryType) -> Literal[bytes]:
return self
@to.register(FixedType)
def _(self, type_var: FixedType) -> Optional[Literal[bytes]]:
if type_var.length == len(self.value):
return FixedLiteral(self.value)
else:
return None
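# Usage sketch (illustrative, based on the factory and `to` conversions above):
#   lit = literal("18:47:33.123456")   # -> StringLiteral
#   t = lit.to(TimeType())             # -> TimeLiteral, or None if unparseable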
```

- **stats:** avg_line_length 27.013793 · max_line_length 120 · alphanum_fraction 0.659689

---

**Record 6**

- **path:** `bot.py`
- **repo:** `egor5q/loshadbot` @ `971aeb11c311168779e59a144ecbd3d89b09aaca`
- **hexsha:** `d5ea25777ad8146fced6fa9de729502e5fdec224` · **size:** 4,427 · **ext:** py · **lang:** Python
- **licenses:** MIT
- **stars / issues / forks:** null / null / null

```python
# -*- coding: utf-8 -*-
import redis
import os
import telebot
import math
import random
import threading
import info
import test
from pymongo import MongoClient
from telebot import types
from emoji import emojize
token = os.environ['TELEGRAM_TOKEN']
bot = telebot.TeleBot(token)

# The original used `collection` without ever creating it; this MongoDB setup
# is an assumed reconstruction (the connection URL is a placeholder).
client = MongoClient(os.environ.get('MONGODB_URI', 'mongodb://localhost:27017'))
collection = client.loshadbot.settings

pisuks = 0
alr = 0
spisok = ['Ппц', 'Мда.', 'Дороу']  # canned replies
spisok2 = []
@bot.message_handler(commands=['chlen'])
def c(m):
bot.send_message(m.chat.id, 'Да да да, я работаю, отъебитесь')
@bot.message_handler(commands=['sasat'])
def sasat(m):
bot.send_message(m.chat.id, 'О, вы выбрали пункт "сасат"! Вы сасали '+str(random.randint(1, 100))+' членов!')
@bot.message_handler(commands=['extend'])
def penises(m):
collection.update_one({'penis':{'$exists':True}},{'$inc':{'penis':0.1}})
x = collection.find_one({'penis':{'$exists':True}})
x=round(x['penis'], 1)
bot.send_message(m.chat.id, 'Ура! Вы увеличили мой пенис! Теперь он '+str(x)+ 'см!')
def pisuk():
global pisuks
pisuks=0
# The original registered only 'sticker', which would leave the text handling
# below unreachable; adding 'text' is an assumed fix so it runs.
@bot.message_handler(content_types=['text', 'sticker'])
def textm(m):
try:
print(m.sticker)
except:
pass
try:
global pisuks
global alr
        p = m.text.lower()
        # 'или' means 'or': for messages like "X or Y?" the bot picks one of
        # the alternatives at random and replies with it.
        if 'или' in p:
n=0
ili=0
while n<len(p):
try:
if p[n]=='и' and p[n+1]=='л' and p[n+2]=='и':
ili+=1
except:
pass
n+=1
print(str(ili))
a=p.split('или')
dd=0
g=0
try:
while g< len(a):
j=len(a[g])
if g<len(a) and g>0:
if a[g][0]==' ' and a[g][j-1]==' ':
dd=1
elif g==0:
if a[g][j-1]==' ':
dd=1
elif g==len(a)-1:
if a[g][0]==' ':
dd=1
g+=1
except:
pass
print (a)
if ili>0:
if dd==1:
try:
rd=random.randint(0,ili)
count=0
slovar=a[rd]
for i in slovar:
count+=1
if slovar[count-1]=='?':
slovar=slovar[:(count-1)]
print(slovar)
if slovar[0]=='я' or slovar[0]=='Я':
slovar=slovar[1:]
print(slovar)
slovar=slovar.capitalize()
print(slovar)
bot.send_message(m.chat.id, slovar)
except:
pass
        # Note: the original repeated 'писюк пидр' twice; the fourth
        # alternative was presumably meant to be 'писюк пидор'.
        if 'пасюк пидр' in p or 'писюк пидр' in p or 'пасюк пидор' in p or 'писюк пидор' in p:
bot.send_message(m.chat.id, 'Тсс')
alr=1
if pisuks==1:
print('1')
if 'п' in p and 'и' in p and 'д' in p and 'р' in p and len(p)<250:
for x in p:
if x not in spisok2:
spisok2.append(x)
if len(spisok2)<15:
print('2')
if alr==0:
bot.send_message(m.chat.id, 'Тсс')
pisuks=0
alr=1
spisok2.clear()
z=random.randint(1, 100)
if z==1:
speach=random.choice(spisok)
bot.send_message(m.chat.id, speach)
if 'п' in p and 'с' in p and 'ю' in p and 'к' in p and len(p)<250:
pisuks=1
t=threading.Timer(4, pisuk)
t.start()
print('3')
if 'п' in p and 'и' in p and 'ю' in p and 'к' in p and 'д' in p and len(p)<250:
if 'p' not in p and 'c' not in p and 'i' not in p and 'a' not in p and 'd' not in p and 'u' not in p:
if 'р' in p:
if 'с' in p:
if pisuks==1:
for x in p:
if x not in spisok2:
spisok2.append(x)
if len(spisok2)<18:
if alr==0:
bot.send_message(m.chat.id, 'Тсс')
pisuks=0
alr=1
else:
if len(spisok2)<15:
bot.send_message(m.chat.id, 'Тсс')
elif 'p' in p and 'a' in p and 's' in p and 'u' in p and 'k' in p and 'i' in p and 'd' in p and 'r' in p:
for x in p:
if x not in spisok2:
spisok2.append(x)
if len(spisok2)<15:
if alr==0:
bot.send_message(m.chat.id, 'nahui idi')
alr=1
spisok2.clear()
alr=0
except:
pass
if __name__ == '__main__':
bot.polling(none_stop=True)
```

- **stats:** avg_line_length 26.195266 · max_line_length 113 · alphanum_fraction 0.466004

---

**Record 7**

- **path:** `tools/relocation_packer/relocation_packer.gyp`
- **repo:** `justremotephone/android_external_chromium_org` @ `246856e61da7acf5494076c74198f2aea894a721`
- **hexsha:** `f74a9e9b08cb1f8c919e8d1ca51905b2b548f308` · **size:** 1,663 · **ext:** gyp · **lang:** Python
- **licenses:** BSD-3-Clause-No-Nuclear-License-2014, BSD-3-Clause
- **stars:** 2 (2020-01-25T10:18:18.000Z → 2021-01-23T15:29:56.000Z)
- **issues:** null
- **forks:** 1 (2020-11-04T07:24:13.000Z → 2020-11-04T07:24:13.000Z)

```python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'lib_relocation_packer',
'toolsets': ['host'],
'type': 'static_library',
'dependencies': [
'../../third_party/elfutils/elfutils.gyp:libelf',
],
'sources': [
'src/debug.cc',
'src/elf_file.cc',
'src/leb128.cc',
'src/packer.cc',
'src/run_length_encoder.cc',
],
},
{
'target_name': 'relocation_packer',
'toolsets': ['host'],
'type': 'executable',
'dependencies': [
'../../third_party/elfutils/elfutils.gyp:libelf',
'lib_relocation_packer',
],
'sources': [
'src/main.cc',
],
},
{
'target_name': 'relocation_packer_unittests',
'toolsets': ['host'],
'type': 'executable',
'cflags': [
'-DINTERMEDIATE_DIR="<(INTERMEDIATE_DIR)"',
],
'dependencies': [
'../../testing/gtest.gyp:gtest',
'lib_relocation_packer',
],
'include_dirs': [
'../..',
],
'sources': [
'src/elf_file_unittest.cc',
'src/leb128_unittest.cc',
'src/packer_unittest.cc',
'src/run_length_encoder_unittest.cc',
'src/run_all_unittests.cc',
],
'copies': [
{
'destination': '<(INTERMEDIATE_DIR)',
'files': [
'test_data/elf_file_unittest_relocs.so',
'test_data/elf_file_unittest_relocs_packed.so',
],
},
],
},
],
}
```

- **stats:** avg_line_length 24.820896 · max_line_length 72 · alphanum_fraction 0.51834

---

**Record 8**

- **path:** `pysnmp-with-texts/ATEN-SMI.py`
- **repo:** `agustinhenze/mibs.snmplabs.com` @ `1fc5c07860542b89212f4c8ab807057d9a9206c7`
- **hexsha:** `b6c73f50913fb9d89ac60b223d3c7d00698f65e5` · **size:** 1,902 · **ext:** py · **lang:** Python
- **licenses:** Apache-2.0
- **stars:** 8 (2019-05-09T17:04:00.000Z → 2021-06-09T06:50:51.000Z)
- **issues:** 4 (2019-05-31T16:42:59.000Z → 2020-01-31T21:57:17.000Z)
- **forks:** 10 (2019-04-30T05:51:36.000Z → 2022-02-16T03:33:41.000Z)

```python
#
# PySNMP MIB module ATEN-SMI (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ATEN-SMI
# Produced by pysmi-0.3.4 at Wed May 1 11:30:51 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint", "SingleValueConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
IpAddress, ModuleIdentity, iso, Integer32, enterprises, Counter64, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, Bits, ObjectIdentity, NotificationType, TimeTicks, Counter32, Unsigned32, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "IpAddress", "ModuleIdentity", "iso", "Integer32", "enterprises", "Counter64", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "Bits", "ObjectIdentity", "NotificationType", "TimeTicks", "Counter32", "Unsigned32", "Gauge32")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
aten = MibIdentifier((1, 3, 6, 1, 4, 1, 21317))
atenProducts = MibIdentifier((1, 3, 6, 1, 4, 1, 21317, 1))
otherEnterprises = MibIdentifier((1, 3, 6, 1, 4, 1, 21317, 2))
atenExperiment = MibIdentifier((1, 3, 6, 1, 4, 1, 21317, 3))
mibBuilder.exportSymbols("ATEN-SMI", atenExperiment=atenExperiment, atenProducts=atenProducts, aten=aten, otherEnterprises=otherEnterprises)
```

- **stats:** avg_line_length 100.105263 · max_line_length 505 · alphanum_fraction 0.774448

---

**Record 9**

- **path:** `chat/urls.py`
- **repo:** `vsoch/chat` @ `72936adc4a001d22befdbda3223f38fd46f1548a`
- **hexsha:** `5a919cb18a783009cabde095439f56f0008b038d` · **size:** 284 · **ext:** py · **lang:** Python
- **licenses:** MIT
- **stars:** 1 (2017-04-25T10:13:31.000Z → 2017-04-25T10:13:31.000Z)
- **issues / forks:** null / null

```python
from django.conf.urls import url, include
from . import views
urlpatterns = [
    # The original pattern was r'', which matches every URL and shadows the
    # routes below; anchoring it restricts it to the site root.
    url(r'^$', views.index),
    url(r'^about/$', views.about, name='about'),
    url(r'^new/$', views.new_room, name='new_room'),
    url(r'^(?P<label>[\w-]{,50})/$', views.chat_room, name='chat_room'),
]
```

- **stats:** avg_line_length 28.4 · max_line_length 72 · alphanum_fraction 0.612676

---

**Record 10**

- **path:** `stdplugins/changelog.py`
- **repo:** `fforius/BotHub` @ `a844a82f6c56caafebf04123955a3901c97de946`
- **hexsha:** `508333aded7aa09d83ff0ef79d66d6bcc19678c4` · **size:** 4,447 · **ext:** py · **lang:** Python
- **licenses:** Apache-2.0
- **stars / issues / forks:** null / null / null

```python
# Copyright (C) 2019 The Raphielscape Company LLC.
#
# Licensed under the Raphielscape Public License, Version 1.c (the "License");
# you may not use this file except in compliance with the License.
# edited by @AvinashReddy3108
# re-edited by @Mayur_Karaniya and @Devpatel_73 to make it work for Uniborg.
#
"""
This module generates the changelog for the userbot based on Upstream revision
cmd is .chk
"""
import asyncio
import os
import random
import re
import sys
import time
from collections import deque
from contextlib import suppress
from os import execl, remove

import git
import heroku3
import requests
from git import Repo
from git.exc import GitCommandError, InvalidGitRepositoryError, NoSuchPathError
from telethon import events
from telethon.tl.functions.users import GetFullUserRequest
from telethon.tl.types import MessageEntityMentionName

from uniborg.util import admin_cmd
async def gen_chlog(repo, diff):
ch_log = ''
d_form = "%d/%m/%y"
for c in repo.iter_commits(diff):
ch_log += f'•[{c.committed_datetime.strftime(d_form)}]: {c.summary} <{c.author}>\n'
return ch_log
async def is_off_br(br):
off_br = ['master']
for k in off_br:
if k == br:
return 1
return
@borg.on(admin_cmd("chk ?(.*)", outgoing=True, allow_sudo=True))
async def upstream(ups):
"For .update command, check if the bot is up to date, update if specified"
await ups.edit("`Checking for updates, please wait....`")
conf = ups.pattern_match.group(1).lower()
off_repo = 'https://github.com/fforius/BotHub.git'
try:
txt = "`Oops.. Updater cannot continue due to some problems occured`\n\n**LOGTRACE:**\n"
repo = Repo()
except NoSuchPathError as error:
await ups.edit(f'{txt}\n`directory {error} is not found`')
return
except GitCommandError as error:
await ups.edit(f'{txt}\n`Early failure! {error}`')
return
except InvalidGitRepositoryError:
repo = Repo.init()
await ups.edit(
"`Warning: Force-Syncing to the latest stable code from repo.`\
\nI may lose my downloaded files during this update."
)
origin = repo.create_remote('upstream', off_repo)
origin.fetch()
repo.create_head('master', origin.refs.master)
repo.heads.master.checkout(True)
ac_br = repo.active_branch.name
if not await is_off_br(ac_br):
await ups.edit(
f'**[UPDATER]:**` Looks like you are using your own custom branch ({ac_br}). \
in that case, Updater is unable to identify which branch is to be merged. \
please checkout to any official branch`')
return
try:
repo.create_remote('upstream', off_repo)
except BaseException:
pass
ups_rem = repo.remote('upstream')
ups_rem.fetch(ac_br)
changelog = await gen_chlog(repo, f'HEAD..upstream/{ac_br}')
if not changelog:
await ups.edit(f'\n`Your BOT is` **up-to-date** `with` **{ac_br}**\n')
return
if conf != "now":
changelog_str = f'**New UPDATE available for [{ac_br}]:\n\nCHANGELOG:**\n`{changelog}`'
if len(changelog_str) > 4096:
await ups.edit("`Changelog is too big, sending it as a file.`")
file = open("output.txt", "w+")
file.write(changelog_str)
file.close()
await ups.client.send_file(
ups.chat_id,
"output.txt",
reply_to=ups.id,
)
remove("output.txt")
else:
await ups.edit(changelog_str)
await ups.respond(
"`do \".update now\" to update\nDon't if using Heroku`")
return
await ups.edit('`New update found, updating...`')
ups_rem.fetch(ac_br)
    await ups.edit('`Successfully Updated!\n'
                   'Bot is restarting... Wait for a second!`')
    # `install_requirements` and `bot` are assumed to be provided by the
    # userbot runtime; neither is defined in this module.
    await install_requirements()
    await bot.disconnect()
# Spin a new instance of bot
execl(sys.executable, sys.executable, *sys.argv)
# Shut the existing one down
exit()
"""CMD_HELP.update({
'update':
".update\
\nUsage: Checks if the main userbot repository has any updates and shows a changelog if so.\
\n\n.update now\
\nUsage: Updates your userbot, if there are any updates in the main userbot repository."
})
"""
```

- **stats:** avg_line_length 29.450331 · max_line_length 96 · alphanum_fraction 0.645829

---

**Record 11**

- **path:** `CS101/Python/ex10.py`
- **repo:** `Am3ra/CS` @ `b31026280917479b86cf4f1be1261b247513d550`
- **hexsha:** `ed7cfa27f29f5f632628a119664344a87dbb8a05` · **size:** 789 · **ext:** py · **lang:** Python
- **licenses:** MIT
- **stars:** 1 (2018-08-22T20:17:01.000Z → 2018-08-22T20:17:01.000Z)
- **issues / forks:** null / null

```python
# Question 10
# Level 2
#
# Question:
# Write a program that accepts a sequence of whitespace separated words as input and prints the words after removing all duplicate words and sorting them alphanumerically.
# Suppose the following input is supplied to the program:
# hello world and practice makes perfect and hello world again
# Then, the output should be:
# again and hello makes perfect practice world
#
# Hints:
# In case of input data being supplied to the question, it should be assumed to be a console input.
# We use set container to remove duplicated data automatically and then use sorted() to sort the data.
string = input().split()
result = []
# Keep only the first occurrence of each word, then sort alphanumerically.
for i in range(len(string)):
    if string.index(string[i]) == i:
        result.append(string[i])
print(" ".join(sorted(result)))
```

- **stats:** avg_line_length 37.571429 · max_line_length 171 · alphanum_fraction 0.746515

---

**Record 12**

- **path:** `ws/consumers.py`
- **repo:** `bl146u/asgi` @ `48fcf0fd97be11ab0b326ad70e2442ca40346bf6`
- **hexsha:** `d7c5145e9bc340813dd19d38ac0d9e1d887aa9a2` · **size:** 1,226 · **ext:** py · **lang:** Python
- **licenses:** Apache-2.0
- **stars / issues / forks:** null / null / null

```python
import json
from asgiref.sync import async_to_sync
from channels.generic.websocket import WebsocketConsumer
class ChatConsumer(WebsocketConsumer):
def connect(self):
self.room_name = self.scope["url_route"]["kwargs"]["room_name"]
self.room_group_name = "chat_%s" % self.room_name
# Join room group
async_to_sync(self.channel_layer.group_add)(
self.room_group_name, self.channel_name
)
self.accept()
def disconnect(self, close_code):
# Leave room group
async_to_sync(self.channel_layer.group_discard)(
self.room_group_name, self.channel_name
)
# Receive message from WebSocket
def receive(self, text_data=None, bytes_data=None):
text_data_json = json.loads(text_data)
message = text_data_json["message"]
# Send message to room group
async_to_sync(self.channel_layer.group_send)(
self.room_group_name, {"type": "chat_message", "message": message}
)
# Receive message from room group
def chat_message(self, event):
message = event["message"]
# Send message to WebSocket
self.send(text_data=json.dumps({"message": message}))
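# A matching routing sketch (hypothetical file ws/routing.py; the URL pattern
# is inferred from scope["url_route"]["kwargs"]["room_name"] above, and
# .as_asgi() assumes Channels 3+):
#
#   from django.urls import re_path
#   from . import consumers
#
#   websocket_urlpatterns = [
#       re_path(r"ws/chat/(?P<room_name>\w+)/$", consumers.ChatConsumer.as_asgi()),
#   ]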
```

- **stats:** avg_line_length 30.65 · max_line_length 78 · alphanum_fraction 0.663948

---

**Record 13**

- **path:** `jgem/bigwig.py`
- **repo:** `kensugino/JUGEMu` @ `3ebf19c96e41f1c90d63d772fd4c9c5cc3d6886f`
- **hexsha:** `ceafae40c22ace4dabe0f9878a0d5c669011995c` · **size:** 13,989 · **ext:** py · **lang:** Python
- **licenses:** MIT
- **stars / issues / forks:** null / null / null

```python
"""
.. module:: bigwig
:synopsis: BIGWIG file related stuffs
.. moduleauthor:: Ken Sugino <ken.sugino@gmail.com>
"""
# system
import os
import subprocess
import time
import multiprocessing
from operator import iadd
import logging
logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)
import shutil
# 3rd party
import pandas as PD
import numpy as N
# for reading BIGWIG file
#ngslib is faster than bx but fails when parallerized
#import bx
#from bx.bbi.bigwig_file import BigWigFile
#from ngslib import wWigIO
#bx-python is not compatible with Python3 => use modified version
from jgem import utils as UT
from jgem import bedtools as BT
import jgem.cy.bw as cybw #import array2wiggle_chr # Cython version
from jgem.cy.bw import array2wiggle_chr
from jgem.bxbbi.bigwig_file import BigWigFile
MAXSIZE = int(300e6) # 300Mbp bigger than chr1,chrX
# BAM to genomecov BIGWIG ##################################################
def cnt_bam(fpath):
"""
Uses samtools to count reads in BAM file.
Args:
fpath (str): path to BAM file
Returns:
int. The number of aligned reads.
Raises:
RuntimeError
"""
cache = fpath+'.flagstat'
bai = fpath+'.bai'
if not os.path.exists(fpath):
raise RuntimeError('file {0} does not exist'.format(fpath))
if not os.path.exists(bai):
cmd = ['samtools', 'index', fpath]
subprocess.call(cmd)
    if os.path.exists(cache) and os.path.getmtime(cache)>os.path.getmtime(fpath):
        out = open(cache,'r').read()
    else:
        cmd = ['samtools', 'flagstat', fpath]
        # check_output returns bytes on Python 3; decode so the text handling
        # below (and the cached copy) is consistently str.
        out = subprocess.check_output(cmd).decode()
        open(cache,'w').write(out)
    firstline = out.split('\n')[0].split()
    return int(firstline[0])+int(firstline[2])
# def wig2bw(wigpath, chromsizes, bwpath): # in bedtools
# pass
def bam2bw(fpath, chromsizes, bpath, aligned=None):
"""
Generate normalized coverage from BAM
Args:
fpath (str): path to BAM
chromsizes (str): path to chromsizes file
bpath (str): path to BIGWIG
aligned (int): number of aligned reads, if None uses samtools to find it from BAM
Requires Bedtools (genomeCoverageBed) and Kent Tool (wigToBigWig)
"""
# countreads
if aligned is None:
aligned = cnt_bam(fpath)
scale = 1000000./float(aligned)
# convert_to_wig
tpath = bpath +'.wig'
UT.makedirs(os.path.dirname(tpath))
tfobj = open(tpath,'wb')
cmd1 = ['genomeCoverageBed', '-split', '-bg', '-ibam', fpath, '-g', chromsizes, '-scale', str(scale)]
p1 = subprocess.Popen(cmd1, stdout=tfobj)
p1.wait()
tfobj.close()
# convet_wig_to_bigwig
cmd2 = ['wigToBigWig', tpath, chromsizes, bpath]
p2 = subprocess.call(cmd2)
# remove_temporary_file
os.remove(tpath)
# bw2bed based on #########################################################
# https://github.com/CGATOxford/cgat/blob/master/scripts/wig2bed.py
# [TODO] report bug to AndreasHeger line 80 "elif => if"
def block_iter(infile, chrom, chunk=int(10e6)):
    "BigWig file iterator"
    with open(infile, mode='rb') as fobj:
        bw = BigWigFile(fobj)
        for x in range(0,MAXSIZE,chunk): # 10Mbp chunk
            iterator = bw.get(chrom, x, x+chunk)
            if iterator is None:
                # PEP 479: raising StopIteration inside a generator is an
                # error on Python 3.7+; returning ends the generator cleanly.
                return
            for v in iterator:
                yield v
# def block_iter_ngs(infile, chrom, chunk=int(10e6)):
# # ngslib is faster than bx but fails when parallelized
# wWigIO.open(infile)
# for x in range(0,MAXSIZE,chunk): # 10Mbp chunk
# iterator = wWigIO.getIntervals(infile, chrom, x, x+chunk)
# if not iterator:
# raise StopIteration
# for v in iterator:
# yield v
# wWigIO.close(infile)
def apply_threshold(infile, threshold, chroms):
"""Only returns intervals exceeding the threshold
Args:
infile: path to bigwig
threshold: positive float
chroms: list of chromosome names
Yields:
intervals (chr,st,ed)
"""
for chrom in chroms:
last_start, last_end = -1, 0
#for start, end, value in block_iter_ngs(infile, chrom):
for start, end, value in block_iter(infile, chrom):
# start=>end has value
d = start - last_end
# d: distance from last_end
            # d > 0 => the gap from last_end to start has value 0 (assuming threshold >= 0)
if (d > 0 or value <= threshold):
if last_start >= 0: # if there's un-yielded interval then yield
yield chrom, last_start, last_end
last_start = -1 # reset (no hit)
#elif last_start < 0 and value > threshold:
# this part is a bug from the original code
# original code will skip the next interval
if last_start < 0 and value > threshold:
# last_start <0 => no current interval
                # and value above threshold ==> start new interval
last_start = start
last_end = end
if last_start >= 0:
yield chrom, last_start, end
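# Usage sketch for apply_threshold (hypothetical path): collect all intervals
# on chr1 and chr2 whose coverage exceeds 5.
#   hits = list(apply_threshold('sample.bw', 5.0, ['chr1', 'chr2']))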
def bw2bed(bwfile, bedfile, chroms, th, compress=True):
"""Transform BigWig genomeCov to binary BED by thresholding.
    Writes the result to bedfile (gzipped when compress=True).
    Args:
        bwfile: path to BigWig file
        bedfile: path to the output BED file
        chroms: list of chromosome names
        th: coverage threshold
        compress: gzip the output (default True)
Returns:
path to generated BED file
"""
bedbase = bedfile[:-3] if bedfile[-3:]=='.gz' else bedfile
#bedfile = '{0}.binary{1:g}.bed'.format(bwfile[:-3], th)
if UT.notstale(bwfile, bedbase+'.gz'):
return bedbase+'.gz'
# make sure bwfile exists
if not ( os.path.exists(bwfile) ):
raise RuntimeError('BigWig file {0} does not exist.'.format(bwfile))
processor = apply_threshold(bwfile,th, chroms)
UT.makedirs(os.path.dirname(bedfile))
out = open(bedbase,'w')
out.write(''.join(['%s\t%i\t%i\n' % x for x in processor]))
    #out.write('\n') #<= this introduces a blank line between chroms in mp mode
# which terminates bedtools at chr1
out.close()
if compress:
return UT.compress(bedbase)
return bedbase
def bw2bed_mp(bwfile, bedfile, chroms, th, np=4):
""" multi CPU version of bw2bed """
args = []
files = []
for chrom in chroms:
bedchromfile = bedfile+'.{0}.bed.gz'.format(chrom)
files.append(bedchromfile)
args.append((bwfile,bedchromfile,[chrom],th,False))
rslts = UT.process_mp(bw2bed, args, np=np, doreduce=False)
# concatenate gz files
bedbase = bedfile[:-3] if bedfile[-3:]=='.gz' else bedfile
with open(bedbase, 'wb') as dst:
for f in rslts:
with open(f, 'rb') as src:
shutil.copyfileobj(src, dst)
    # !!! bedtools gzip problem again !!!
    # bedtools only processes the first one if gzipped files are simply concatenated
# => concatenate unzipped and gzip whole thing at the end
bedfile = UT.compress(bedbase)
# clean up temp files
for f in rslts:
os.unlink(f)
return bedfile
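# Usage sketch for bw2bed_mp (hypothetical paths): threshold a coverage track
# at 5 with 4 worker processes; per-chromosome BEDs are concatenated unzipped
# and gzipped once at the end.
#   bedpath = bw2bed_mp('sample.bw', 'sample.binary5.bed', UT.chroms('mm10'), 5.0, np=4)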
### covbp, totbp, avgcov from bigwig array #################################
def get_totbp_covbp_bw(bwfile, genome, chroms=None):
""" Calculate total bp, covered bp, mean coverage, covered %.
Args:
bwfile: bigwig file
genome: UCSC genome name
chroms (list): of chromosomes
Returns:
Pandas dataframe
"""
chromdf = UT.chromdf(genome).set_index('chr')['size']
def one(chrom):
        csize = chromdf.loc[chrom]
a = get_bigwig_as_array(bwfile, chrom, 0, csize)
totbp = N.sum(a)
covbp = N.sum(a>0)
acov = float(totbp)/covbp
covp = (float(covbp)/csize)*100.
return {'totbp':totbp,'covbp':covbp,'acov':acov,'cov%':covp}
if chroms is None:
chroms = UT.chroms(genome)
df = PD.DataFrame({x: one(x) for x in chroms})
return df
### Merge BigWigs ##########################################################
def get_bigwig_as_array(bwfile, chrom, st, ed):
"""Get BIGWIG coverage values as array of size (ed-st). Array start corresponds to st.
0-based
Args:
bwfile: path to BIGWIG
chrom (str): chromosome name
st (int): start position
ed (int): end position
Returns:
Numpy array of size (ed-st)
"""
# with open(bwfile, mode='rb') as fobj:
# bw = BigWigFile(fobj)
# it = bw.get(chrom,st,ed)
# a = N.zeros(ed-st)
# for s,e,v in it:
# a[s-st:e-st] += v
# return a
if UT.isstring(bwfile):
with open(bwfile, mode='rb') as fobj:
bw = BigWigFile(fobj)
a = bw.get_as_array(chrom,st,ed)
if a is None:
a = N.array([]) # null array
else:
a[N.isnan(a)]=0.
else:
a = bwfile.get_as_array(chrom,st,ed)
if a is None:
a = N.array([]) # null array
else:
a[N.isnan(a)]=0.
return a
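# Usage sketch (hypothetical path): per-base coverage over the first kilobase
# of chr1; returns a numpy array of length 1000 with NaNs replaced by 0.
#   cov = get_bigwig_as_array('sample.bw', 'chr1', 0, 1000)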
def merge_bigwigs_chr(bwfiles, chrom, chromsize, dstpath, scale):
# merge4-allsample.bw chr1 89026991 intervals ~50%
# better to just use dense array than sparse array
a = N.zeros(chromsize)
for fpath in bwfiles:
with open(fpath,mode='rb') as fobj:
bw = BigWigFile(fobj)
it = bw.get(chrom, 0, chromsize)
if it is not None:
for s,e,v in it:
a[s:e] += v
#a = a/float(len(bwfiles))
if scale is not None:
a = a*scale
a = N.array(a, dtype=N.float32)
cybw.array2wiggle_chr(a, chrom, dstpath)
return (chrom, dstpath)
def merge_bigwigs_mp(bwfiles, genome, dstpath, scale=None, np=7):
chroms = UT.chroms(genome)
chromfile = UT.chromsizes(genome)
chromsizes = UT.df2dict(UT.chromdf(genome), 'chr', 'size')
# reorder chroms, so that chrX doesn't get processed alone at the end wasting MP time
tmp = sorted([(chromsizes[c],c) for c in chroms])[::-1]
chroms = [x[1] for x in tmp]
args = [(bwfiles, c, chromsizes[c], dstpath+'.{0}.wig'.format(c), scale) for c in chroms]
rslts = UT.process_mp(merge_bigwigs_chr, args, np, doreduce=False)
dic = dict(rslts)
LOG.debug('concatenating chromosomes...')
wigpath = dstpath+'.wig'
UT.makedirs(os.path.dirname(wigpath))
with open(wigpath, 'wb') as dst:
for c in chroms:
with open(dic[c],'rb') as src:
shutil.copyfileobj(src, dst)
LOG.debug('converting wiggle to bigwig')
BT.wig2bw(wigpath, chromfile, dstpath)
# clean up
for c in chroms:
f = dstpath+'.{0}.wig'.format(c)
if os.path.exists(f):
os.unlink(f)
if os.path.exists(wigpath):
os.unlink(wigpath)
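# Usage sketch (hypothetical paths): sum two replicate tracks, halving the
# result so the output track is their per-base mean.
#   merge_bigwigs_mp(['rep1.bw', 'rep2.bw'], 'mm10', 'mean.bw', scale=0.5, np=7)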
# def array2wiggle_chr(a, chrom, dstpath):
# possibly Cythonify
# def _gen():
# i = 0
# # skip initial 0
# while(a[i]==0):
# i+=1
# st = i
# c = a[st] # initial non-zero
# i+=1
# while(i<len(a)):
# # advance until change
# while(i<len(a) and a[i]==c):
# i+=1
# if c!=0:
# # 0-based => 1-based
# yield '{0}\t{1}\t{2}\t{3}\n'.format(chrom,st,i,c)
# if i<len(a):
# st = i
# c = a[st]
# with open(dstpath,'w') as fobj:
# if N.sum(a)>0: # some elements are not zero
# cnt = 0
# txt = []
# for line in _gen():
# txt.append(line)
# cnt += 1
# if cnt == 100000:
# fobj.write(''.join(txt))
# cnt = 0
# txt = []
# fobj.write(txt)
# return dstpath
### Convenience classes ###################################################
class BWObj(object):
def __init__(self, fpath):
self.fpath = fpath
def __enter__(self):
self.fobj = open(self.fpath, 'rb')
self.bw = BigWigFile(self.fobj)
return self
def __exit__(self, exc_type, exc_value, traceback):
self.fobj.close()
def get(self, chrom, st, ed):
a = self.bw.get_as_array(chrom,st,ed)
if a is None:
a = N.array([]) # null array
else:
a[N.isnan(a)]=0.
return a
class BWs(object):
def __init__(self, paths):
self.bwobjs = [BWObj(p) for p in paths]
def __enter__(self):
for b in self.bwobjs:
b.__enter__()
def __exit__(self, exc_type, exc_value, traceback):
for b in self.bwobjs:
b.__exit__(exc_type, exc_value, traceback)
def get(self, chrom, st, ed):
a = self.bwobjs[0].get(chrom, st, ed)
for b in self.bwobjs[1:]:
a += b.get(chrom, st, ed)
return a
class MultiBigWigs(object):
def __init__(self, plus, minus=[]):
"""
Args:
plus: list of bigwig paths to add
minus: list of bigwig paths to subtract
"""
self.ps = set(plus)
self.ns = set(minus)
def make_bws(self):
self.bws = bws = {}
bws['p'] = [BWObj(f) for f in self.ps if os.path.exists(f)]
bws['n'] = [BWObj(f) for f in self.ns if os.path.exists(f)]
def __enter__(self):
for k in ['p','n']:
for b in self.bws[k]:
b.__enter__()
def __exit__(self, exc_type, exc_value, traceback):
for k in ['p','n']:
for b in self.bws[k]:
b.__exit__(exc_type, exc_value, traceback)
def get(self, chrom, st, ed):
a = self.bws['p'][0].get(chrom, st, ed)
for b in self.bws['p'][1:]:
a += b.get(chrom, st, ed)
for b in self.bws['n']:
a -= b.get(chrom, st, ed)
return a
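# Usage sketch for MultiBigWigs (hypothetical paths): add the plus tracks,
# subtract the minus track, then read one window.
#   mbw = MultiBigWigs(plus=['a.bw', 'b.bw'], minus=['c.bw'])
#   mbw.make_bws()
#   with mbw:
#       arr = mbw.get('chr1', 0, 1000)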
| 30.610503
| 105
| 0.563943
|
f83b5b605d44e5d91681fa505b59ef0a4cfaa355
| 2,107
|
py
|
Python
|
src/ml/speech_rec/LAS/models/listen.py
|
HayleyO/Senior-Design
|
853a2d2cdaf10f1614297fc0d9d50359d3757005
|
[
"MIT"
] | null | null | null |
src/ml/speech_rec/LAS/models/listen.py
|
HayleyO/Senior-Design
|
853a2d2cdaf10f1614297fc0d9d50359d3757005
|
[
"MIT"
] | 12
|
2022-01-16T21:21:26.000Z
|
2022-03-29T23:13:15.000Z
|
src/ml/speech_rec/LAS/models/listen.py
|
HayleyO/Senior-Design
|
853a2d2cdaf10f1614297fc0d9d50359d3757005
|
[
"MIT"
] | null | null | null |
import torch.nn as nn
class pBLSTMLayer(nn.Module):
def __init__(self, input_feature_dim, hidden_dim):
super(pBLSTMLayer, self).__init__()
        # Because we reduce the time dimension by a factor of 2, the feature dimension doubles
self.BLSTM = nn.LSTM(input_feature_dim*2, hidden_dim,1, bidirectional=True, batch_first=True)
# Input shape should be [# of sample, timestep, features]
def forward(self, input_x):
batch_size = input_x.size(0)
timestep = input_x.size(1)
feature_dim = input_x.size(2)
        # As per the arXiv paper:
# "In each successive stacked pBLSTM layer, we reduce the time resolution by a factor of 2."
input_x = input_x.contiguous().view(batch_size,int(timestep/2),feature_dim*2)
output, hidden = self.BLSTM(input_x)
return output, hidden
class Listener(nn.Module):
def __init__(self, input_feature_dim=39, listener_hidden_dim=256, BLSTM_layers=3, **kwargs):
super(Listener, self).__init__()
"""
input_feature_dim: 39 (because of mfcc 39) [#num of sample, timestep, features]
listener_hidden_dim: 256 (from paper)
BLSTM_layers: 3 (because of the paper)
"""
self.BLSTM_layers = BLSTM_layers
        assert self.BLSTM_layers >= 1, 'Cannot have a listener without at least one layer'
        # List of pyramidal BLSTM layers; nn.ModuleList registers their parameters
        pBLSTM = []
        pBLSTM.append(pBLSTMLayer(input_feature_dim, listener_hidden_dim))
        for i in range(1, self.BLSTM_layers):
            pBLSTM.append(pBLSTMLayer(listener_hidden_dim*2, listener_hidden_dim))
        self.pBLSTM = nn.ModuleList(pBLSTM)
def forward(self,input_x):
output, _ = self.pBLSTM[0](input_x)
        for i in range(1, self.BLSTM_layers):
            # As per the arXiv paper:
"""In the pBLSTM model, we concatenate the outputs at consecutive steps of each layer before feeding it to the next layer, i.e.:
h^j_i = pBLSTM(h^j_i, [h^(j-1)_2i, h^(j-1)_(2i+1)])"""
output, _ = self.pBLSTM[i](output)
return output
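# Shape sketch (assumed sizes; requires `import torch`): with the default 3
# pBLSTM layers the time axis shrinks 8x and features become 2*listener_hidden_dim.
#   listener = Listener(input_feature_dim=39, listener_hidden_dim=256, BLSTM_layers=3)
#   out = listener(torch.zeros(4, 800, 39))  # -> [4, 100, 512]; T must be divisible by 8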
| 41.313725
| 140
| 0.641196
|
7d8d36488288bd166fcafd4186b28ea2b654a7da
| 5,307
|
py
|
Python
|
bin/bashthebug-classifications-analyse.py
|
philipwfowler/bashthebug
|
8b229078405b9e1f46d3152fcfe7cd9d77c54b30
|
[
"MIT"
] | null | null | null |
bin/bashthebug-classifications-analyse.py
|
philipwfowler/bashthebug
|
8b229078405b9e1f46d3152fcfe7cd9d77c54b30
|
[
"MIT"
] | null | null | null |
bin/bashthebug-classifications-analyse.py
|
philipwfowler/bashthebug
|
8b229078405b9e1f46d3152fcfe7cd9d77c54b30
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python
import argparse, logging
import pandas
import bashthebug
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--input",required=True,help="the csv file downloaded from the Zooniverse containing all the classifcations done to date")
parser.add_argument("--from_date",required=False,help="the date to consider classifications from (ISO format e.g. 2017-04-07)")
parser.add_argument("--to_date",required=False,help="the date to consider classifications up to")
parser.add_argument("--timings",action='store_true',default=False,help="print the time taken for each step")
parser.add_argument("--flavour",default="regular",type=str,help="whether to create a regular BASHTHEBUG table or final BASHTHEBUGPRO table (regular/pro)")
options = parser.parse_args()
assert options.flavour in ['regular','pro'], 'unrecognised flavour of BashTheBug!'
# parse the output file to work out the output stem
if options.flavour=="regular":
output_stem=options.input.split("bash-the-bug-classifications")[1].split(".csv")[0]
elif options.flavour=="pro":
output_stem=options.input.split("bash-the-bug-pro-classifications")[1].split(".csv")[0]
print("Reading classifications from CSV file...")
if options.to_date:
if options.from_date:
if options.flavour=='pro':
current_classifications=bashthebug.BashTheBugClassifications(zooniverse_file=options.input,to_date=options.to_date,from_date=options.from_date,live_rows=False,flavour=options.flavour)
else:
current_classifications=bashthebug.BashTheBugClassifications(zooniverse_file=options.input,to_date=options.to_date,from_date=options.from_date,flavour=options.flavour)
else:
if options.flavour=='pro':
current_classifications=bashthebug.BashTheBugClassifications(zooniverse_file=options.input,to_date=options.to_date,live_rows=False,flavour=options.flavour)
else:
current_classifications=bashthebug.BashTheBugClassifications(zooniverse_file=options.input,to_date=options.to_date,flavour=options.flavour)
elif options.from_date:
if options.flavour=='pro':
current_classifications=bashthebug.BashTheBugClassifications(zooniverse_file=options.input,from_date=options.from_date,live_rows=False,flavour=options.flavour)
else:
current_classifications=bashthebug.BashTheBugClassifications(zooniverse_file=options.input,from_date=options.from_date,flavour=options.flavour)
else:
if options.flavour=='pro':
current_classifications=bashthebug.BashTheBugClassifications(zooniverse_file=options.input,live_rows=False,flavour=options.flavour)
else:
current_classifications=bashthebug.BashTheBugClassifications(zooniverse_file=options.input,flavour=options.flavour)
current_classifications.extract_classifications()
most_recent_date=str(current_classifications.classifications.created_at.max().date().isoformat())
# open a log file to record images where the wells cannot be identified
if options.flavour=="regular":
logging.basicConfig(filename="log/bashthebug-classifications-analyse-"+most_recent_date+".log",level=logging.INFO,format='%(levelname)s: %(message)s', datefmt='%a %d %b %Y %H:%M:%S')
elif options.flavour=="pro":
logging.basicConfig(filename="log/bashthebugpro-classifications-analyse-"+most_recent_date+".log",level=logging.INFO,format='%(levelname)s: %(message)s', datefmt='%a %d %b %Y %H:%M:%S')
current_classifications.create_measurements_table()
current_classifications.create_users_table()
for sampling_time in ['month','week','day']:
if options.flavour=="regular":
current_classifications.plot_classifications_by_time(sampling=sampling_time,filename='pdf/graph-classifications-'+sampling_time+'.pdf',add_cumulative=True)
current_classifications.plot_users_by_time(sampling=sampling_time,filename='pdf/graph-users-'+sampling_time+'.pdf',add_cumulative=True)
elif options.flavour=="pro":
current_classifications.plot_classifications_by_time(sampling=sampling_time,filename='pdf/graph-pro-classifications-'+sampling_time+'.pdf',add_cumulative=True)
current_classifications.plot_users_by_time(sampling=sampling_time,filename='pdf/graph-pro-users-'+sampling_time+'.pdf',add_cumulative=True)
if options.flavour=="regular":
current_classifications.plot_user_classification_distribution(filename="pdf/graph-user-distribution.pdf")
elif options.flavour=='pro':
current_classifications.plot_user_classification_distribution(filename="pdf/graph-pro-user-distribution.pdf")
logging.info(current_classifications)
print("Saving compressed PKL file...")
# current_classifications.save_csv("dat/bash-the-bug-classifications.csv.bz2",compression=True)
if options.flavour=="regular":
current_classifications.save_pickle("dat/bash-the-bug-classifications.pkl.bz2")
elif options.flavour=='pro':
current_classifications.save_pickle("dat/bash-the-bug-pro-classifications.pkl.bz2")
logging.info(current_classifications.users[["classifications","rank"]][:20])
| 58.318681
| 199
| 0.751837
|
42307dbbbf1916f297064ceaf7ac2de21b021620
| 11,021
|
py
|
Python
|
base_streamlit.py
|
Washbourne/test
|
0c118a544867ad3a0deb8f25749205d6d56b29f5
|
[
"CNRI-Python-GPL-Compatible"
] | null | null | null |
base_streamlit.py
|
Washbourne/test
|
0c118a544867ad3a0deb8f25749205d6d56b29f5
|
[
"CNRI-Python-GPL-Compatible"
] | null | null | null |
base_streamlit.py
|
Washbourne/test
|
0c118a544867ad3a0deb8f25749205d6d56b29f5
|
[
"CNRI-Python-GPL-Compatible"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 29 06:34:35 2020
@author: Horri
"""
import streamlit as st
import random as r
import numpy as np
import pandas as pd
# # # # # # #
#left_col, right_col = st.beta_columns(2)
st.title('WFRP Dice Roller')
st.button('Roll!')
combat_bool = st.sidebar.checkbox('Combat Roll?', False)
weapon = st.sidebar.selectbox('Select your Weapon', ['Hand Weapon', 'Dagger', 'Pick', 'Long Bow', 'Handgun','Pistol'])
impale_trait = st.sidebar.checkbox('Impale Trait?', False)
damaging_trait = st.sidebar.checkbox('Damaging Trait?', False)
dangerous_trait = st.sidebar.checkbox('Dangerous Trait', False)
slow_trait = st.sidebar.checkbox('Slow Trait?', False)
strength_bonus = st.sidebar.text_input('Input Strength Bonus')
left_col, right_col = st.beta_columns(2)
str_threshold = st.text_input('Input your Skill/Attribute Score', 40)
advantage = st.slider('Select your Advantage Modifier', -5,5,0)
# # # # # # #
int_threshold = None
str_roll = None
int_roll = None
hit_loc = None
def check_input_type(threshold):
global int_threshold
try:
        int_threshold = int(threshold)
        if 0 < int_threshold < 101:
return int_threshold
else:
st.write('Error: Score outside range')
except:
st.write('Input Type Error')
check_input_type(str_threshold)
def roll_dice():
global str_roll
global int_roll
roll = r.randint(1, 100)
int_roll = roll
if roll >= 10:
str_roll = str(roll)
else:
str_roll = '0'+str(roll)
def determine_advantage(advantage):
global str_threshold
global int_threshold
mod_advantage = advantage * 10
int_threshold = int_threshold + mod_advantage
st.write(f'''
------ Rolled: {str_roll} ------ Threshold: {int_threshold} ------
''')
def fumble():
if combat_bool == True:
if int_roll <= 20:
st.write('Fumble: You catch a part of your anatomy. Lose one wound ignoring TB and AP')
elif int_roll <= 40:
            st.write('Fumble: Your weapon jars badly and suffers 1 damage, next round you act last regardless of initiative')
elif int_roll <= 60:
st.write('Fumble: Next round your action suffers a -10 penalty')
elif int_roll <= 70:
st.write('Fumble: You stumble badly, lose your movement next round')
elif int_roll <= 80:
st.write('Fumble: You mishandle your weapon, miss your next action')
elif int_roll <= 90:
st.write('Fumble: You overextend yourself, suffer a Torn Muscle (Minor) injury (see page 179) this counts as a critical wound')
elif int_roll <= 100:
            st.write('Fumble: You completely mess up, hitting 1 random ally in range. If not possible, you are stunned')
critical_head_injuries = {
'Dramatic Injury':[1,'A fine wound across the forehead and cheek. Gain 1 Bleeding Condition. Once the wound is healed, the impressive scar it leaves provides a bonus of +1 SL to appropriate social Tests. You can only gain this benefit once.'],
'Minor Cut':[1,'The strike opens your cheek and blood flies. Gain 1 Bleeding Condition.'],
'Poked Eye':[1,'The blow glances across your eye socket. Gain 1 Blinded condition.'],
'Ear Bash':[1,'After a sickening impact, your ear is left ringing. Gain 1 Deafened Condition.'],
'Rattling Blow':[2,'The blow floods your vision with flashing lights. Gain 1 Stunned Condition.'],
'Black Eye':[2,'A solid blow hits your eye, leaving tears and pain. Gain 2 Blinded Conditions.'],
'Sliced Ear':[2,'Your side of your head takes a hard blow, cutting deep into your ear. Gain 2 Deafened and 1 Bleeding Condition.'],
'Struck Forehead':[2,'A solid blow hits your forehead. Gain 2 Bleeding Conditions and a Blinded Condition that cannot be removed until all Bleeding Conditions are removed.'],
'Fractured Jaw':[3,'With a sickening crunch, pain fills your face as the blow fractures your jaw. Gain 2 Stunned Conditions. Suffer a Broken Bone (Minor) injury.'],
'Major Eye Wound':[3,'The blow cracks across your eye socket. Gain 1 Bleeding Condition. Also gain 1 Blinded Condition that cannot be removed until you receive Medical Attention.'],
'Major Ear Wound':[3,'The blow strikes deep into one ear. Suffer a permanent –20 penalty on all Tests relating to hearing. If you suffer this result again, your hearing is permanently lost as the second ear falls quiet. Only magic can heal this.'],
'Broken Nose':[3,'A solid blow to the centre of your face causing blood to pour. Gain 2 Bleeding Conditions. Make a Challenging (+0) Endurance Test, or also gain a Stunned Condition. After this wound has healed, gain +1/–1 SL on social rolls, depending on context, unless Surgery is used to reset the nose.'],
'Broken Jaw':[4,'The crack is sickening as the blow hits you under the chin, breaking your jaw. Gain 3 Stunned Conditions. Make a Challenging (+0) Endurance Test or gain an Unconscious Condition. Suffer a Broken Bone (Major) injury.'],
'Concussive Blow':[4,'Your brain rattles in your skull as blood spurts from your nose and ears. Take 1 Deafened , 2 Bleeding , and 1d10 Stunned Conditions. Gain a Fatigued Condition that lasts for 1d10 days. If you receive another Critical Wound to your head while suffering this Fatigued Condition, make an Average (+20) Endurance Test or also gain an Unconscious Condition.'],
'Smashed Mouth':[4,'With a sickening crunch, your mouth is suddenly filled with broken teeth and blood. Gain 2 Bleeding Conditions. Lose 1d10 teeth — Amputation (Easy).'],
'Mangled Ear':[4,'Little is left of your ear as the blow tears it apart. You gain 3 Deafened and 2 Bleeding Conditions. Lose your ear —Amputation (Average).'],
'Devastated Eye':[5,'A strike to your eye completely bursts it, causing extraordinary pain. Gain 3 Blinded , 2 Bleeding , and 1 Stunned Condition. Lose your eye — Amputation (Difficult).'],
'Disfiguring Blow':[5,'The blow smashes your entire face, destroying your eye and nose in a cloud of blood. Gain 3 Bleeding , 3 Blinded and 2 Stunned Conditions. Lose your eye and nose — Amputation (Hard).'],
'Mangled Jaw':[5,'The blow almost removes your jaw as it utterly destroys your tongue, sending teeth flying in a shower of blood. Gain 4 Bleeding and 3 Stunned Conditions. Make a Very Hard (–30) Endurance Test or gain an Unconscious Condition. Suffer a Broken Bone (Major) injury and lose your tongue and 1d10 teeth — Amputation (Hard).'],
'Decapitated':[100,'Your head is entirely severed from your neck and soars through the air, landing 1d10 feet away in a random direction (see Scatter). Your body collapses, instantly dead.'],
}
def determine_critical_loc():
crit_roll = r.randint(1, 100)
if crit_roll <= 10:
return critical_head_injuries['Dramatic Injury'][1]
elif crit_roll <= 20:
return critical_head_injuries['Minor Cut'][1]
elif crit_roll <= 25:
return critical_head_injuries['Poked Eye'][1]
elif crit_roll <= 30:
return critical_head_injuries['Ear Bash'][1]
elif crit_roll <= 35:
return critical_head_injuries['Rattling Blow'][1]
elif crit_roll <= 40:
return critical_head_injuries['Black Eye'][1]
elif crit_roll <= 45:
return critical_head_injuries['Sliced Ear'][1]
elif crit_roll <= 50:
return critical_head_injuries['Struck Forehead'][1]
elif crit_roll <= 55:
return critical_head_injuries['Fractured Jaw'][1]
elif crit_roll <= 60:
return critical_head_injuries['Major Eye Wound'][1]
elif crit_roll <= 65:
return critical_head_injuries['Major Ear Wound'][1]
elif crit_roll <= 70:
return critical_head_injuries['Broken Nose'][1]
elif crit_roll <= 75:
return critical_head_injuries['Broken Jaw'][1]
elif crit_roll <= 80:
return critical_head_injuries['Concussive Blow'][1]
elif crit_roll <= 85:
return critical_head_injuries['Smashed Mouth'][1]
elif crit_roll <= 90:
return critical_head_injuries['Mangled Ear'][1]
elif crit_roll <= 93:
return critical_head_injuries['Devastated Eye'][1]
elif crit_roll <= 96:
return critical_head_injuries['Disfiguring Blow'][1]
elif crit_roll <= 99:
return critical_head_injuries['Mangled Jaw'][1]
elif crit_roll <= 101:
return critical_head_injuries['Decapitated'][1]
def determine_critical(str_roll, int_roll):
if (str_roll[0] == str_roll[1]) or (impale_trait and int_roll % 10 == 0):
if int_roll <= int_threshold:
var = determine_critical_loc()
st.write(f'{var}')
else:
st.write('Critical Failure!')
fumble()
def determine_hit_loc(str_roll, int_roll):
global hit_loc
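    # WFRP-style hit location: reverse the digits of the attack roll
    # (e.g. a roll of 43 becomes location 34) and read the table below.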
loc = int(str_roll[1]+str_roll[0])
st.title('Hit Location')
if loc < 10:
st.write('Head!')
hit_loc = 'Head'
elif loc < 25:
st.write('Left Arm')
elif loc < 45:
st.write('Right Arm')
elif loc < 80:
st.write('Body')
elif loc < 90:
        st.write('Left Leg')
elif loc < 101:
st.write('Right Leg')
def validate_combat():
    try:
        if combat_bool:
            if not strength_bonus:
                st.write('ERROR: Enter your Strength Bonus.')
                return False
        return True
    except:
        raise Exception('uh oh!')
def evaluate_dice(int_threshold, combat_bool, str_roll, int_roll):
if int_roll <= int_threshold:
st.write(f'{str_roll[0]} degree(s) of Success!')
else:
st.write(f'{str_roll[0]} degree(s) of Failure!')
if combat_bool == True:
if validate_combat():
determine_hit_loc(str_roll, int_roll)
if dangerous_trait == True:
        if str_roll[0] == '9' or str_roll[1] == '9':
fumble()
def determine_weapon():
global strength_bonus
weapon_list = {'Hand Weapon':4, 'Dagger':2, 'Pick':5, 'Long Bow':4, 'Handgun':9,'Pistol':8}
if weapon in weapon_list.keys():
if int_roll <= int_threshold:
            if weapon in ('Handgun', 'Pistol'):
st.write(f'{weapon_list[weapon]+int(str_roll[0])} dmg')
if damaging_trait:
st.write(f'or {weapon_list[weapon]+int(str_roll[1])} dmg')
else:
st.write(f'{weapon_list[weapon]+int(str_roll[0])+int(strength_bonus)} dmg')
if damaging_trait:
st.write(f'or {weapon_list[weapon]+int(str_roll[1])} dmg')
else:
            if weapon in ('Handgun', 'Pistol'):
st.write(f'{weapon_list[weapon]} dmg')
else:
st.write(f'{weapon_list[weapon]+int(strength_bonus)} dmg')
# # # # # # #
st.title('Results')
roll_dice()
determine_advantage(advantage)
determine_critical(str_roll, int_roll)
evaluate_dice(int_threshold, combat_bool, str_roll, int_roll)
determine_weapon()
| 45.920833
| 379
| 0.667362
|
8a28b21e82d15cf1eb1889cd9ba91a69be267ce8
| 148
|
py
|
Python
|
hikyuu/admin/service/__init__.py
|
kknet/hikyuu
|
650814c3e1d32894ccc1263a0fecd6693028d2e3
|
[
"MIT"
] | 1,283
|
2016-04-06T16:06:46.000Z
|
2022-03-31T06:37:53.000Z
|
hikyuu/admin/service/__init__.py
|
kknet/hikyuu
|
650814c3e1d32894ccc1263a0fecd6693028d2e3
|
[
"MIT"
] | 47
|
2017-08-12T10:37:03.000Z
|
2022-03-28T13:22:21.000Z
|
hikyuu/admin/service/__init__.py
|
kknet/hikyuu
|
650814c3e1d32894ccc1263a0fecd6693028d2e3
|
[
"MIT"
] | 416
|
2017-05-15T05:28:26.000Z
|
2022-03-22T07:44:15.000Z
|
# -*- coding: utf-8 -*-
from .restful import login
from .assist import AssisService
from .user import UserService
from .trade import TradeService
| 18.5
| 32
| 0.756757
|
d2d4ff6c9de958d7407dbd4bdf8564608fe297ae
| 787
|
py
|
Python
|
cls_pickup_shortdistance.py
|
naotohori/cafysis
|
9d8534121c01ea75ae965cf39a1e307052ff8523
|
[
"MIT"
] | 2
|
2022-02-25T17:32:41.000Z
|
2022-03-31T14:38:55.000Z
|
cls_pickup_shortdistance.py
|
naotohori/cafysis
|
9d8534121c01ea75ae965cf39a1e307052ff8523
|
[
"MIT"
] | 2
|
2020-05-03T08:36:10.000Z
|
2021-01-27T12:40:50.000Z
|
cls_pickup_shortdistance.py
|
naotohori/life-of-py
|
9d8534121c01ea75ae965cf39a1e307052ff8523
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import sys
if len(sys.argv) != 5:
print ('\n Usage: SCRIPT [distance file] [mpvec file] [cutoff] [output vec file]\n')
sys.exit(2)
f_dist = open(sys.argv[1], 'r')
f_mpvec = open(sys.argv[2],'r')
cutoff = float(sys.argv[3])
f_out = open(sys.argv[-1],'w')
imps_output = []
for line in f_dist :
linesp = line.split()
imp = int(linesp[1]) # column
dist = float(linesp[4])
if dist <= cutoff :
imps_output.append(imp)
f_dist.close()
for line in f_mpvec:
linesp = line.split()
imp = int(linesp[1]) # column
if imp in imps_output :
v = (float(linesp[2]), float(linesp[3]), float(linesp[4]))
else :
v = (0.0, 0.0, 0.0)
f_out.write('%30.20E\n%30.20E\n%30.20E\n' % v)
f_mpvec.close()
f_out.close()
| 23.147059
| 88
| 0.590851
|
de47b9bb7fe61595f02203dfc79565254c74a622
| 12,069
|
py
|
Python
|
test/functional/qtumcash_create_eth_op_code.py
|
qtumcashproject/qtumcash
|
4f095de839e524c4d43a861769695d199bfd7544
|
[
"MIT"
] | 2
|
2020-04-30T09:41:04.000Z
|
2020-05-08T12:03:21.000Z
|
test/functional/qtumcash_create_eth_op_code.py
|
qtumcashproject/qtumcash
|
4f095de839e524c4d43a861769695d199bfd7544
|
[
"MIT"
] | null | null | null |
test/functional/qtumcash_create_eth_op_code.py
|
qtumcashproject/qtumcash
|
4f095de839e524c4d43a861769695d199bfd7544
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
from test_framework.qtumcash import *
from test_framework.address import *
class QtumCashCreateEthOpCodeTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def assert_address_with_value_in_unspents(self, address, value):
for unspent in self.node.listunspent():
if unspent['address'] == address:
assert_equal(unspent['amount'], value)
break
else:
assert(False)
def create_contract_with_value_from_contract_test(self):
"""
pragma solidity ^0.4.7;
contract Factory {
bytes code;
address public newAddress;
function create() payable {
code = hex"60606040525b5b5b60358060146000396000f30060606040525b5b5b0000a165627a7a72305820c09bfe42796663bc047f817fd76fe3537f040acc4a39c783c9f41493c88dd24d0029";
bytes memory mem_code = code;
address newAddr;
assembly {
newAddr := create(100, add(mem_code,0x20), mload(mem_code))
}
newAddress = newAddr;
}
}
"""
factory_with_value_contract_address = self.node.createcontract("6060604052341561000f57600080fd5b5b6103448061001f6000396000f30060606040526000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff168063ccdb3f4514610049578063efc81a8c1461009e575b600080fd5b341561005457600080fd5b61005c6100a8565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b6100a66100ce565b005b600160009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b6100d661025f565b6000608060405190810160405280604981526020017f60606040525b5b5b60358060146000396000f30060606040525b5b5b0000a16581526020017f627a7a72305820c09bfe42796663bc047f817fd76fe3537f040acc4a39c783c981526020017ff41493c88dd24d0029000000000000000000000000000000000000000000000081525060009080519060200190610170929190610273565b5060008054600181600116156101000203166002900480601f0160208091040260200160405190810160405280929190818152602001828054600181600116156101000203166002900480156102075780601f106101dc57610100808354040283529160200191610207565b820191906000526020600020905b8154815290600101906020018083116101ea57829003601f168201915b505050505091508151602083016064f0905080600160006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055505b5050565b602060405190810160405280600081525090565b828054600181600116156101000203166002900490600052602060002090601f016020900481019282601f106102b457805160ff19168380011785556102e2565b828001600101855582156102e2579182015b828111156102e15782518255916020019190600101906102c6565b5b5090506102ef91906102f3565b5090565b61031591905b808211156103115760008160009055506001016102f9565b5090565b905600a165627a7a72305820154b57e8d991c23341604ced78f201aeb717e5d5678ae7c4941fa4eac267dfc60029")['address']
self.node.generate(1)
deployed_contracts = self.node.listcontracts()
# Make sure that the tx was mined
assert_equal(len(self.node.getrawmempool()), 0)
# Make sure that the contract was deployed
assert_equal(deployed_contracts[factory_with_value_contract_address], 0)
# Next, attempt to create the contract via the "create" method
txid = self.node.sendtocontract(factory_with_value_contract_address, "efc81a8c", 1000, 1000000, "0.000001")['txid']
blockhash = self.node.generate(1)[0]
# Make sure that the tx was mined
assert_equal(len(self.node.getrawmempool()), 0)
# Make sure that the contract was NOT created
assert_equal(self.node.listcontracts(), deployed_contracts)
# Make sure that the call to create resulted in an out of gas exception (all gas will have been assigned to the miner)
# The total gas is equal to 1 qtumcash (10^6 * 10^2) + a minor txfee
block = self.node.getblock(blockhash)
coinbase_tx = self.node.decoderawtransaction(self.node.gettransaction(block['tx'][0])['hex'])
assert(coinbase_tx['vout'][0]['value'] >= 20000+1)
# Since the call to the contract threw an out of gas exception the origin contract should have a zero balance
assert_equal(deployed_contracts[factory_with_value_contract_address], 0)
def create_contract_without_value_from_contract_test(self):
# Below we make sure that calls work as expected on contracts created by other contracts.
"""
pragma solidity ^0.4.12;
contract Factory {
address public newAddress;
function create() payable {
newAddress = new Test();
}
function() payable {}
}
contract Test {
uint public check;
function Test() payable {}
function destroy() payable {
suicide(msg.sender);
}
function sendTo(address other, uint value) {
other.transfer(value);
}
function checkOtherAddress(address other) {
address myOwnAddress = Factory(other).newAddress();
if(myOwnAddress == address(this)) {
check = 1;
} else {
check = 2;
}
}
function() payable {}
}
"""
# We create the Factory contract, which will later be used to create the Test contract.
factory_contract_bytecode = "6060604052341561000f57600080fd5b5b6104028061001f6000396000f3006060604052361561004a576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff168063ccdb3f451461004e578063efc81a8c146100a3575b5b5b005b341561005957600080fd5b6100616100ad565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b6100ab6100d2565b005b6000809054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b6100da610132565b604051809103906000f08015156100f057600080fd5b6000806101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055505b565b6040516102948061014383390190560060606040525b5b5b61027e806100166000396000f30060606040523615610060576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff168063541227e11461006457806383197ef01461009d578063919840ad146100a75780639e1a00aa146100d0575b5b5b005b341561006f57600080fd5b61009b600480803573ffffffffffffffffffffffffffffffffffffffff16906020019091905050610112565b005b6100a56101ec565b005b34156100b257600080fd5b6100ba610207565b6040518082815260200191505060405180910390f35b34156100db57600080fd5b610110600480803573ffffffffffffffffffffffffffffffffffffffff1690602001909190803590602001909190505061020d565b005b60008173ffffffffffffffffffffffffffffffffffffffff1663ccdb3f456000604051602001526040518163ffffffff167c0100000000000000000000000000000000000000000000000000000000028152600401602060405180830381600087803b151561018057600080fd5b6102c65a03f1151561019157600080fd5b5050506040518051905090503073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614156101de5760016000819055506101e7565b60026000819055505b5b5050565b3373ffffffffffffffffffffffffffffffffffffffff16ff5b565b60005481565b8173ffffffffffffffffffffffffffffffffffffffff166108fc829081150290604051600060405180830381858888f19350505050151561024d57600080fd5b5b50505600a165627a7a72305820fa7432274a811bb14b3a9182e51715a333ce275636c08ab171c44fa197f20d6c0029a165627a7a723058201d26b91d6e884c90c5d2582dc21c991a7552a6538030c5599f555fb0b7eacd450029"
self.factory_contract_address = self.node.createcontract(factory_contract_bytecode)['address']
self.node.generate(1)
assert_equal(self.node.listcontracts()[self.factory_contract_address], 0)
# Call create(), creating the Test contract
self.node.sendtocontract(self.factory_contract_address, "efc81a8c", 0, 1000000, "0.000001")
self.node.generate(1)
# Fetch the address of the newly created contract via calling the newAddress() method
output = self.node.callcontract(self.factory_contract_address, "ccdb3f45")['executionResult']['output']
self.test_contract_address = output[24:]
assert_equal(self.node.listcontracts()[self.test_contract_address], 0)
def check_value_transfers_from_and_to_contract_test(self):
# Send some coins to the contract
self.node.sendtocontract(self.test_contract_address, "00", 100)
self.node.generate(1)
assert_equal(self.node.listcontracts()[self.test_contract_address], 100)
# Transfer 50 qtumcash from the contract via p2pkh to an address of our choice
receiver_address = self.node.getnewaddress()
h160addr = str(base58_to_byte(receiver_address, 25)[1])[2:-1]
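        # ABI-encoded call data: the 4-byte selector ("9e1a00aa", sendTo)
        # followed by two 32-byte (64 hex char) left-padded arguments.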
data = "9e1a00aa"
data += h160addr.zfill(64)
data += hex(int(50*COIN))[2:].zfill(64)
self.node.sendtocontract(self.test_contract_address, data)
self.node.generate(1)
assert_equal(self.node.listcontracts()[self.test_contract_address], 50)
self.assert_address_with_value_in_unspents(receiver_address, 50)
# Transfer 50 qtumcash from the contract via OP_CALL to its parent contract (the Factory contract)
receiver_address = self.node.getnewaddress()
h160addr = str(base58_to_byte(receiver_address, 25)[1])[2:-1]
data = "9e1a00aa"
data += self.factory_contract_address.zfill(64)
data += hex(int(50*COIN))[2:].zfill(64)
self.node.sendtocontract(self.test_contract_address, data)
self.node.generate(1)
assert_equal(self.node.listcontracts()[self.test_contract_address], 0)
assert_equal(self.node.listcontracts()[self.factory_contract_address], 50)
def check_calls_to_contract_test(self):
self.node.sendtocontract(self.test_contract_address, "541227e1" + self.factory_contract_address.zfill(64))
self.node.generate(1)
output = self.node.callcontract(self.test_contract_address, "919840ad")['executionResult']['output']
assert_equal(int(output, 16), 1)
def check_suicide_test(self):
sender = self.node.getnewaddress()
self.node.sendtoaddress(sender, 1)
self.node.generate(1)
self.node.sendtocontract(self.test_contract_address, "83197ef0", 0, 1000000, "0.000001", sender)
self.node.generate(1)
# Make sure that the contract is no longer calleable, i.e., does not exist.
assert_raises_rpc_error(-5, "contract address does not exist", self.node.sendtocontract, self.test_contract_address, "00")
def run_test(self):
self.node = self.nodes[0]
self.node.generate(10+COINBASE_MATURITY)
print('Checking that contracts cannot be created from other contracts with a default value')
self.create_contract_with_value_from_contract_test()
print('Checking that contracts can be created from other contract without a default value')
self.create_contract_without_value_from_contract_test()
print('Checking that value transfers via op_call and p2pkh works as expected for the created "subcontract"')
self.check_value_transfers_from_and_to_contract_test()
print('Checking that calls to other contracts works as expected for the created "subcontract"')
self.check_calls_to_contract_test()
print('Checking that suicides works as expected for the created "subcontract"')
self.check_suicide_test()
if __name__ == '__main__':
QtumCashCreateEthOpCodeTest().main()
| 64.540107
| 2,152
| 0.768332
|
5108190a94c6d2f4f523b12b53b6de1c52b1f15e
| 1,929
|
py
|
Python
|
Examples/basic_example.py
|
alexbodn/PikaBus
|
5faf2e48f4d4deecb4428707f94bcf72a81cc3ee
|
[
"MIT"
] | 7
|
2020-03-21T12:22:18.000Z
|
2022-02-10T11:43:51.000Z
|
Examples/basic_example.py
|
alexbodn/PikaBus
|
5faf2e48f4d4deecb4428707f94bcf72a81cc3ee
|
[
"MIT"
] | null | null | null |
Examples/basic_example.py
|
alexbodn/PikaBus
|
5faf2e48f4d4deecb4428707f94bcf72a81cc3ee
|
[
"MIT"
] | 1
|
2021-06-21T10:56:56.000Z
|
2021-06-21T10:56:56.000Z
|
import pika
import datetime
from PikaBus.abstractions.AbstractPikaBus import AbstractPikaBus
from PikaBus.PikaBusSetup import PikaBusSetup
def MessageHandlerMethod(**kwargs):
"""
    A message handler method may simply be a method with some **kwargs.
The **kwargs will be given all incoming pipeline data, the bus and the incoming payload.
"""
data: dict = kwargs['data']
bus: AbstractPikaBus = kwargs['bus']
payload: dict = kwargs['payload']
print(payload)
if payload['reply']:
payload['reply'] = False
bus.Reply(payload=payload)
# Use pika connection params to set connection details
credentials = pika.PlainCredentials('amqp', 'amqp')
connParams = pika.ConnectionParameters(
host='localhost',
port=5672,
virtual_host='/',
credentials=credentials)
# Create a PikaBusSetup instance with a listener queue, and add the message handler method.
pikaBusSetup = PikaBusSetup(connParams,
defaultListenerQueue='myQueue',
defaultSubscriptions='myTopic')
pikaBusSetup.AddMessageHandler(MessageHandlerMethod)
# Start consuming messages from the queue.
pikaBusSetup.StartConsumers()
# Create a temporary bus to subscribe on topics and send, defer or publish messages.
bus = pikaBusSetup.CreateBus()
bus.Subscribe('myTopic')
payload = {'hello': 'world!', 'reply': True}
# To send a message means sending a message explicitly to one receiver.
bus.Send(payload=payload, queue='myQueue')
# To defer a message means sending a message explicitly to one receiver with some delay before it is processed.
bus.Defer(payload=payload, delay=datetime.timedelta(seconds=1), queue='myQueue')
# To publish a message means publishing a message on a topic received by any subscribers of the topic.
bus.Publish(payload=payload, topic='myTopic')
input('Hit enter to stop all consuming channels \n\n')
pikaBusSetup.StopConsumers()
| 35.722222
| 111
| 0.734578
|
9adf548c870586fba41ae3ecfca639b063e3ab89
| 885
|
py
|
Python
|
tests/test_212.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
tests/test_212.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
tests/test_212.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import pytest
"""
Test 212. Word Search II
"""
@pytest.fixture(scope="session")
def init_variables_212():
from src.leetcode_212_word_search_ii import Solution
solution = Solution()
def _init_variables_212():
return solution
yield _init_variables_212
class TestClass212:
def test_solution_0(self, init_variables_212):
assert (
init_variables_212().findWords(
[
["o", "a", "a", "n"],
["e", "t", "a", "e"],
["i", "h", "k", "r"],
["i", "f", "l", "v"],
],
["oath", "pea", "eat", "rain"],
)
== ["eat", "oath"]
)
def test_solution_1(self, init_variables_212):
assert init_variables_212().findWords([["a", "b"], ["c", "d"]], ["abcb"]) == []
| 22.692308
| 87
| 0.479096
|
9a6bccfd94f7cf0dc190827f885a3a1c09041699
| 2,033
|
py
|
Python
|
DjangoWebProject/urls.py
|
nayak16/TartanHacks-2015
|
e659df9cdd27c69a0619d17d778d578aedc6ecd8
|
[
"MIT"
] | null | null | null |
DjangoWebProject/urls.py
|
nayak16/TartanHacks-2015
|
e659df9cdd27c69a0619d17d778d578aedc6ecd8
|
[
"MIT"
] | null | null | null |
DjangoWebProject/urls.py
|
nayak16/TartanHacks-2015
|
e659df9cdd27c69a0619d17d778d578aedc6ecd8
|
[
"MIT"
] | null | null | null |
"""
Definition of urls for DjangoWebProject.
"""
from datetime import datetime
from django.conf.urls import patterns, url
from app.forms import BootstrapAuthenticationForm
from django.contrib import admin
from django.conf import settings
# Uncomment the next lines to enable the admin:
# from django.conf.urls import include
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'app.views.home', name='home'),
url(r'^index.html', 'app.views.home', name='home'),
url(r'^new_event', 'app.views.new_event'),
url(r'^create_event', 'app.views.create_event'),
url(r'^event_page', 'app.views.redirectBack'),
url(r'^edit_event', 'app.views.edit_event'),
url(r'^log_venmo', 'app.views.log_venmo'),
url(r'^redirect_event/', 'app.views.redirect_event'),
url(r'^event/(?P<event_id>\w+)', 'app.views.display_event'),
url(r'^admin/(?P<event_id>\w+)', 'app.views.admin_event'),
url(r'^confirmation', 'app.views.confirm'),
url(r'^contact$', 'app.views.contact', name='contact'),
url(r'^about', 'app.views.about', name='about'),
url(r'^login/$',
'django.contrib.auth.views.login',
{
'template_name': 'app/login.html',
'authentication_form': BootstrapAuthenticationForm,
'extra_context':
{
'title':'Log in',
'year':datetime.now().year,
}
},
name='login'),
url(r'^logout$',
'django.contrib.auth.views.logout',
{
'next_page': '/',
},
name='logout'),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
if not settings.DEBUG:
urlpatterns += patterns('',
(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT}),
)
| 33.327869
| 105
| 0.617806
|
b818753a88d1be6628489960769d797686820012
| 866
|
py
|
Python
|
miner/rank.py
|
HeavenDuke/GithubMiner
|
3d14c40c9cbdee6f22e7ade3493888aff708ad5b
|
[
"MIT"
] | 2
|
2017-05-22T10:31:15.000Z
|
2017-05-23T06:52:58.000Z
|
miner/rank.py
|
HeavenDuke/GithubMiner
|
3d14c40c9cbdee6f22e7ade3493888aff708ad5b
|
[
"MIT"
] | null | null | null |
miner/rank.py
|
HeavenDuke/GithubMiner
|
3d14c40c9cbdee6f22e7ade3493888aff708ad5b
|
[
"MIT"
] | null | null | null |
from utils.repositories import fetch_accepted_repository_list
from utils.Time import Time
from py2neo import Graph
from recommendation.ranking import RankingCalculator
import sys
connection = sys.argv[1]
username = sys.argv[2]
password = sys.argv[3]
year = int(sys.argv[4])
month = int(sys.argv[5])
day = int(sys.argv[6])
hour = 0
g = Graph(connection, user = username, password = password)
ar = fetch_accepted_repository_list(g)
RankingCalculator.calculate_ranking(g, repository_list = ar, time = Time(year = year, month = month, day = day, hour = hour), dtype = "Daily")
RankingCalculator.calculate_ranking(g, repository_list = ar, time = Time(year = year, month = month, day = day, hour = hour), dtype = "Weekly")
RankingCalculator.calculate_ranking(g, repository_list = ar, time = Time(year = year, month = month, day = day, hour = hour), dtype = "Monthly")
| 39.363636
| 144
| 0.743649
|
8ee670680f0b3ab418da29ca24722307f807d86d
| 3,917
|
py
|
Python
|
audioIO/srnlp.py
|
kjchavez/jarvis
|
56eb65d959d82e5643797ad16cda9d7d378d4385
|
[
"OML",
"Apache-1.1"
] | null | null | null |
audioIO/srnlp.py
|
kjchavez/jarvis
|
56eb65d959d82e5643797ad16cda9d7d378d4385
|
[
"OML",
"Apache-1.1"
] | null | null | null |
audioIO/srnlp.py
|
kjchavez/jarvis
|
56eb65d959d82e5643797ad16cda9d7d378d4385
|
[
"OML",
"Apache-1.1"
] | null | null | null |
import os
import time
import json
import numpy as np
import apiai
import pyaudio
import pocketsphinx
from audioIO.pause import PauseDetector
class PocketSphinx:
CHUNK = 512
FORMAT = pyaudio.paInt16
CHANNELS = 1
RATE = 16000
MODELDIR = "/usr/local/share/pocketsphinx/model"
config = pocketsphinx.Decoder.default_config()
config.set_string('-hmm', os.path.join(MODELDIR, 'en-us/en-us'))
config.set_string('-lm', os.path.join(MODELDIR, 'en-us/en-us.lm.bin'))
config.set_string('-dict', os.path.join(MODELDIR, 'en-us/cmudict-en-us.dict'))
decoder = pocketsphinx.Decoder(config)
pause_detector = PauseDetector(rel_threshold=0.3, min_pause_length=8)
@staticmethod
def reset():
PocketSphinx.pause_detector.reset()
@staticmethod
def capture_and_process(pa, timeout=None):
PocketSphinx.reset()
PocketSphinx.decoder.start_utt()
stream = pa.open(format=PocketSphinx.FORMAT,
channels=PocketSphinx.CHANNELS,
rate=PocketSphinx.RATE,
input=True,
output=False,
frames_per_buffer=PocketSphinx.CHUNK)
start_time = time.time()
while True:
if timeout is not None:
                if time.time() - start_time > timeout:
break
buf = stream.read(PocketSphinx.CHUNK)
# Check if utterance is over
is_pause = PocketSphinx.pause_detector.process(buf, dtype=np.int16, debug=True)
if is_pause:
print "Detected pause"
break
PocketSphinx.decoder.process_raw(buf, False, False)
print "Done capturing."
stream.stop_stream()
stream.close()
PocketSphinx.decoder.end_utt()
words = [seg.word for seg in PocketSphinx.decoder.seg() if seg.word[0] != '<' ]
return " ".join(words)
class APIAI:
CHUNK = 512
FORMAT = pyaudio.paInt16
CHANNELS = 1
RATE = 44100
CLIENT_ACCESS_TOKEN = 'f720d62e62ef452a85525c816a4e5428'
SUBSCRIBTION_KEY = '5f4a1f26-e151-4b7e-b97f-ce4a6c34b1f6'
ai = apiai.ApiAI(CLIENT_ACCESS_TOKEN, SUBSCRIBTION_KEY)
pause_detector = PauseDetector()
resampler = apiai.Resampler(source_samplerate=RATE)
request = ai.voice_request()
@staticmethod
def callback(in_data, frame_count, time_info, status):
frames, data = APIAI.resampler.resample(in_data, frame_count)
is_pause = APIAI.pause_detector.process(data, dtype=np.int16, debug=True)
APIAI.request.send(data)
if not is_pause:
return in_data, pyaudio.paContinue
else:
return in_data, pyaudio.paComplete
@staticmethod
def reset():
APIAI.pause_detector.reset()
APIAI.request = APIAI.ai.voice_request()
@staticmethod
def capture_and_process(pa, timeout=None):
# Using API.ai
APIAI.reset()
stream = pa.open(format=APIAI.FORMAT,
channels=APIAI.CHANNELS,
rate=APIAI.RATE,
input=True,
output=False,
frames_per_buffer=APIAI.CHUNK,
stream_callback=APIAI.callback)
stream.start_stream()
start_time = time.time()
try:
while stream.is_active():
if timeout is not None:
if time.time() - start_time > timeout:
break
time.sleep(0.1)
except Exception:
            raise
except KeyboardInterrupt:
pass
stream.close()
print ("Wait for response...")
response = APIAI.request.getresponse()
return json.loads(response.read())
| 31.336
| 91
| 0.596885
|
87b662bc43a049cc6c85adcce35dc8b3c7afc725
| 646
|
py
|
Python
|
pyccapt/control/devices_test/counter.py
|
mmonajem/apt_pycontrol
|
3e2413e19adf69af9f90818016c9d5694185a889
|
[
"Apache-2.0"
] | null | null | null |
pyccapt/control/devices_test/counter.py
|
mmonajem/apt_pycontrol
|
3e2413e19adf69af9f90818016c9d5694185a889
|
[
"Apache-2.0"
] | null | null | null |
pyccapt/control/devices_test/counter.py
|
mmonajem/apt_pycontrol
|
3e2413e19adf69af9f90818016c9d5694185a889
|
[
"Apache-2.0"
] | null | null | null |
try:
import nidaqmx
except:
print('Please install nidaqmx')
import time
if __name__ == '__main__':
task_counter = nidaqmx.Task()
task_counter.ci_channels.add_ci_count_edges_chan("Dev1/ctr0")
# if you need to prescale
# task.ci_channels[0].ci_prescaler = 8
# reference the terminal you want to use for the counter here
task_counter.ci_channels[0].ci_count_edges_term = "PFI0"
task_counter.start()
# task.read()
i = 0
for i in range(10):
time.sleep(1)
data = task_counter.read(number_of_samples_per_channel=1)
print(data)
task_counter.stop()
task_counter.close()
| 23.925926
| 65
| 0.678019
|
3425d71b9992973eeb04cdfd8d095ef16a356892
| 137
|
py
|
Python
|
examples/error_messages/app.py
|
feihong/muffin-playground
|
beaeaac15727ac581a75da78b2d973632171c150
|
[
"Apache-2.0"
] | 1
|
2016-10-26T05:14:57.000Z
|
2016-10-26T05:14:57.000Z
|
examples/error_messages/app.py
|
feihong/muffin-playground
|
beaeaac15727ac581a75da78b2d973632171c150
|
[
"Apache-2.0"
] | null | null | null |
examples/error_messages/app.py
|
feihong/muffin-playground
|
beaeaac15727ac581a75da78b2d973632171c150
|
[
"Apache-2.0"
] | null | null | null |
"""
To run:
muffin app run
"""
from muffin_playground import Application
app = Application()
app.register_special_static_route()
| 11.416667
| 41
| 0.737226
|
05821606a1d151a7671ed55ef5d445b9e0e0031e
| 23,421
|
py
|
Python
|
trunk/MOPS_CarType.py
|
n5iln/railmops
|
f7d3b446435b31bad8cddf343f18ca7efb9eac10
|
[
"Unlicense"
] | 1
|
2015-03-30T12:10:56.000Z
|
2015-03-30T12:10:56.000Z
|
trunk/MOPS_CarType.py
|
n5iln/railmops
|
f7d3b446435b31bad8cddf343f18ca7efb9eac10
|
[
"Unlicense"
] | null | null | null |
trunk/MOPS_CarType.py
|
n5iln/railmops
|
f7d3b446435b31bad8cddf343f18ca7efb9eac10
|
[
"Unlicense"
] | null | null | null |
'''
CarType Class This sub-divides Car Classes into cars with the same characteristics for
loading and other purposes
Model Operations Processing System. Copyright Brian Fairbairn 2009-2010. Licensed under the EUPL.
You may not use this work except in compliance with the Licence. You may obtain a copy of the
Licence at http://ec.europa.eu/idabc/eupl or as attached with this application (see Licence file).
Unless required by applicable law or agreed to in writing, software distributed under the Licence
is distributed on an 'AS IS' basis WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either expressed
or implied. See the Licence governing permissions and limitations under the Licence.
Changes:
15/08/2010 Ver 1 Unused variables removed
17/08/2010 Ver 1 Added processing for P-Passenger type cars
'''
import MOPS_Element
class cCarTypes(MOPS_Element.cElement):
"""details about car types. car types contain information about cars, and are linked to car
classes. car types have loading codes which determine loading and unloading availability.
"""
extract_header = 'id|code|name|length|oper mode|capacity|' +\
'unladen weight|loading|unloading|class\n'
extract_code = 'select * from cartype'
def adcart(self, message):
"""adds details of a type of car - length, weight, capacity, unladen weight,
and how it loads/unloads (linked to a loading code). must belong to a car class.
operating mode also required - I-Independent (ie normal car) or part of a
multiple unit (mainly for passenger car sets that operate in multiple units with
a mixture of powered and unpowered cars).
"""
if self.show_access(message,
'ADCART car type;type name;length;capacity;unladen weight;oper mode[I/M];' +\
'^load^;^(un)load^;^car class^', 'S') != 0:
return
errors = 0
#code---------------------------------------------------------------------------------------
cartype, rc = self.extract_field(message, 0, 'CAR TYPE CODE')
if rc > 0:
return
if len(cartype) > self.cartsize:
print('* CAR TYPE CODE ENTERED IS GREATER THAN THE ALLOWED SIZE')
return
if len(cartype) ==0:
print('* NO CAR TYPE CODE ENTERED: A BLANK CODE IS NOT ALLOWED')
return
#check it does not already exist on the database--------------------------------------------
data = (cartype,)
sql = 'select id from cartype where cartype = ?'
count, dummy = self.db_read(sql, data)
if count < 0:
return
if count != 0:
print('* CAR TYPE CODE ALREADY EXISTS')
return
#name---------------------------------------------------------------------------------------
car_type_name, rc = self.extract_field(message, 1, 'CAR TYPE NAME')
if rc > 0:
return
#length-------------------------------------------------------------------------------------
length, rc = self.extract_field(message, 2, 'CAR TYPE LENGTH')
if rc > 0:
return
try:
if int(length) > 99999 or int(length) < 0:
errors = errors + 1
print('* CAR TYPE LENGTH MUST BE IN THE RANGE 0 to 99999')
except:
errors = errors + 1
print('* CAR TYPE LENGTH MUST BE A WHOLE NUMBER')
#capacity-----------------------------------------------------------------------------------
capacity, rc = self.extract_field(message, 3, 'CAR TYPE CAPACITY')
if rc > 0:
return
try:
if int(capacity) > 99999 or int(capacity) < 0:
errors = errors + 1
print('* CAR TYPE CAPACITY MUST BE IN THE RANGE 0 to 99999')
except:
errors = errors + 1
print('* CAR TYPE CAPACITY MUST BE A WHOLE NUMBER')
#unladen weight-----------------------------------------------------------------------------
unladen_weight, rc = self.extract_field(message, 4, 'UNLADEN WEIGHT')
if rc > 0:
return
try:
if int(unladen_weight) > 99999 or int(unladen_weight) < 0:
errors = errors + 1
print('* CAR TYPE UNLADEN WEIGHT MUST BE IN THE RANGE 0 to 99999')
except:
errors = errors + 1
print('* CAR TYPE UNLADEN WEIGHT MUST BE A WHOLE NUMBER')
#car oper type------------------------------------------------------------------------------
car_oper_mode, rc = self.extract_field(message, 5, 'OPERATING MODE')
if rc > 0:
return
if not(car_oper_mode == 'I' or car_oper_mode == 'M' or car_oper_mode == 'P'): #Ver 1
errors = errors + 1
print('* OPERATING MODE MUST BE I-INDEPENDENT M-MULTIPLE UNIT P-PASSENGER')
#loading------------------------------------------------------------------------------------
loading, rc = self.extract_field(message, 6, 'LOADING CODE')
if rc > 0:
return
data = (loading, 'Y')
sql = 'select desc from loading where loading = ? and can_load = ?'
count, ds_loadings = self.db_read(sql, data)
if count < 0:
return
if count == 0:
errors = errors + 1
print('* LOADING CODE ' + loading + ' DOES NOT EXIST OR NOT SET FOR LOADING')
else:
for row in ds_loadings:
loading_desc = row[0]
#unloading----------------------------------------------------------------------------------
unloading, rc = self.extract_field(message, 7, 'UNLOADING CODE')
if rc > 0:
return
data = (unloading, 'Y')
sql = 'select desc from loading where loading = ? and can_unload = ?'
count, ds_loadings = self.db_read(sql, data)
if count < 0:
return
if count == 0:
errors = errors + 1
print('* LOADING CODE ' + unloading + ' DOES NOT EXIST OR NOT SET FOR UNLOADING')
else:
for row in ds_loadings:
unloading_desc = row[0]
#car class----------------------------------------------------------------------------------
carclass, rc = self.extract_field(message, 8, 'CAR CLASS CODE')
if rc > 0:
return
data = (carclass,)
sql = 'select name from carclass where carclass = ?'
count, ds_classes = self.db_read(sql, data)
if count < 0:
return
if count == 0:
errors = errors + 1
print('* CAR CLASS CODE DOES NOT EXIST')
else:
for row in ds_classes:
class_name = row[0]
#carry out the update-----------------------------------------------------------------------
if errors != 0:
return
data = (cartype, car_type_name, length, car_oper_mode, capacity, unladen_weight,
loading, unloading, carclass)
sql = 'insert into cartype values (null, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
if self.db_update(sql, data) != 0:
return
        oper_desc = ''
        if car_oper_mode == 'I':
            oper_desc = 'INDEPENDENT'
        elif car_oper_mode == 'M':
            oper_desc = 'MULTIPLE UNIT'
        elif car_oper_mode == 'P':                                              #Ver 1
            oper_desc = 'PASSENGER'                                             #Ver 1
print('NEW CAR TYPE ADDED SUCCESSFULLY')
        print(cartype + ' ' + car_type_name + ' ' + carclass + ' ' + class_name + ' ' + oper_desc)
        print('LENGTH: ' + str(length) + ' CAPACITY: ' + str(capacity) + ' UNLADEN WT: ' +\
              str(unladen_weight))
        print('LOADING: ' + loading + ' (' + loading_desc.strip() + ') ' + 'UNLOADING: ' +\
              unloading + ' (' + unloading_desc.strip() + ')')
return
def chcart(self, message):
"""amend details of a type of car - length, weight, capacity, unladen weight,
and how it loads/unloads (linked to a loading code). Must belong to a car class.
Operating mode also required - I-Independent (ie normal car) or part of a
Multiple Unit (mainly for passenger car sets that operate in Multiple Units with
a mixture of powered and unpowered cars).
"""
if self.show_access(message,
'CHCART car type;(type name);(length);(capacity);(unladen weight);' +\
            '([I/M/P]);(^load^);(^(un)load^);(^car class^)', 'S') != 0:
return
errors = 0
#code---------------------------------------------------------------------------------------
cartype, rc = self.extract_field(message, 0, 'CAR TYPE CODE')
if rc > 0:
return
#read the database and populate the fields
data = (cartype,)
sql = 'select name, length, oper_mode, capacity, unladen_weight, loading, unloading, ' +\
'carclass from cartype where cartype = ?'
count, ds_cartypes = self.db_read(sql, data)
if count < 0:
return
if count == 0:
print('* CAR TYPE CODE DOES NOT EXIST')
return
for row in ds_cartypes:
car_type_name = row[0]
length = row[1]
car_oper_mode = row[2]
capacity = row[3]
unladen_weight = row[4]
loading = row[5]
unloading = row[6]
carclass = row[7]
old_carclass = row[7]
#name---------------------------------------------------------------------------------------
value, rc = self.extract_field(message, 1, '')
if rc == 0:
car_type_name = value
#length-------------------------------------------------------------------------------------
value, rc = self.extract_field(message, 2, '')
if rc == 0:
length = value
        try:
            if int(length) > 99999 or int(length) < 0:
                errors = errors + 1
                print('* CAR TYPE LENGTH MUST BE IN THE RANGE 0 to 99999')
        except (ValueError, TypeError):
            errors = errors + 1
            print('* CAR TYPE LENGTH MUST BE A WHOLE NUMBER')
#capacity-----------------------------------------------------------------------------------
value, rc = self.extract_field(message, 3, '')
if rc == 0:
capacity = value
        try:
            if int(capacity) > 99999 or int(capacity) < 0:
                errors = errors + 1
                print('* CAR TYPE CAPACITY MUST BE IN THE RANGE 0 to 99999')
        except (ValueError, TypeError):
            errors = errors + 1
            print('* CAR TYPE CAPACITY MUST BE A WHOLE NUMBER')
#unladen weight-----------------------------------------------------------------------------
value, rc = self.extract_field(message, 4, '')
if rc == 0:
unladen_weight = value
        try:
            if int(unladen_weight) > 99999 or int(unladen_weight) < 0:
                errors = errors + 1
                print('* CAR TYPE UNLADEN WEIGHT MUST BE IN THE RANGE 0 to 99999')
        except (ValueError, TypeError):
            errors = errors + 1
            print('* CAR TYPE UNLADEN WEIGHT MUST BE A WHOLE NUMBER')
#car oper type------------------------------------------------------------------------------
value, rc = self.extract_field(message, 5, '')
if rc == 0:
car_oper_mode = value
if not(car_oper_mode == 'I' or car_oper_mode == 'M' or car_oper_mode == 'P'): #Ver 1
errors = errors + 1
print('* OPERATING MODE MUST BE I-INDEPENDENT M-MULTIPLE UNIT P-PASSENGER')
#loading------------------------------------------------------------------------------------
value, rc = self.extract_field(message, 6, '')
if rc == 0:
loading = value
data = (loading, 'Y')
sql = 'select desc from loading where loading = ? and can_load = ?'
count, ds_loadings = self.db_read(sql, data)
if count < 0:
return
if count == 0:
errors = errors + 1
print('* LOADING CODE ' + loading + ' DOES NOT EXIST OR NOT SET FOR LOADING')
else:
for row in ds_loadings:
loading_desc = row[0]
#unloading----------------------------------------------------------------------------------
value, rc = self.extract_field(message, 7, '')
if rc == 0:
unloading = value
data = (unloading, 'Y')
sql = 'select desc from loading where loading = ? and can_unload = ?'
count, ds_loadings = self.db_read(sql, data)
if count < 0:
return
if count == 0:
errors = errors + 1
print('* LOADING CODE ' + unloading + ' DOES NOT EXIST OR NOT SET FOR UNLOADING')
else:
for row in ds_loadings:
unloading_desc = row[0]
#car class----------------------------------------------------------------------------------
value, rc = self.extract_field(message, 8, '')
if rc == 0:
carclass = value
data = (carclass,)
sql = 'select name from carclass where carclass = ?'
count, ds_classes = self.db_read(sql, data)
if count < 0:
return
if count == 0:
errors = errors + 1
print('* CAR CLASS CODE DOES NOT EXIST')
else:
for row in ds_classes:
class_name = row[0]
#carry out the update-----------------------------------------------------------------------
if errors != 0:
return
data = (car_type_name, length, car_oper_mode, capacity, unladen_weight, loading,
unloading, carclass, cartype)
sql = 'update cartype set name = ?, length = ?, oper_mode = ?, capacity = ?, ' +\
'unladen_weight = ?, loading = ?, unloading = ?, carclass = ? where cartype = ?'
if self.db_update(sql, data) != 0:
return
if carclass != old_carclass:
data = (carclass, old_carclass)
sql = 'update car set carclass = ? where carclass = ?'
if self.db_update(sql, data) != 0:
return
if car_oper_mode == 'I':
oper_desc = 'INDEPENDENT'
elif car_oper_mode == 'M':
oper_desc = 'MULTIPLE UNIT'
elif car_oper_mode == 'P': #Ver 1
oper_desc = 'PASSENGER' #Ver 1
else:
oper_desc = ''
print('CAR TYPE DETAILS CHANGED SUCCESSFULLY')
        print(cartype + ' ' + car_type_name + ' ' + carclass + ' ' + class_name + ' ' + oper_desc)
        print('LENGTH: ' + str(length) + ' CAPACITY: ' + str(capacity) + ' UNLADEN WT: ' +\
              str(unladen_weight))
        print('LOADING: ' + loading + ' (' + loading_desc.strip() + ') ' + 'UNLOADING: ' +\
              unloading + ' (' + unloading_desc.strip() + ')')
return
def dxcart(self, message):
"""deletes a car type from the list. checks that a car does not refer to it
"""
if self.show_access(message, 'DXCART car type', 'S') != 0:
return
#code---------------------------------------------------------------------------------------
cartype, rc = self.extract_field(message, 0, 'CAR TYPE CODE')
if rc > 0:
return
data = (cartype,)
#validate the change------------------------------------------------------------------------
sql = 'select id from cartype where cartype = ?'
count, dummy = self.db_read(sql, data)
if count < 0:
return
if count == 0:
print('* CAR TYPE CODE DOES NOT EXIST')
return
#make sure that there is not a car linked to the cartype------------------------------------
sql = 'select id from car where cartype = ?'
count, dummy = self.db_read(sql, data)
if count < 0:
return
if count > 0:
print('* CARS BELONG TO THIS CAR TYPE - CANNOT DELETE')
return
#process the change-------------------------------------------------------------------------
if self.db_update('delete from cartype where cartype = ?', data) == 0:
print('CAR TYPE ' + cartype + ' SUCCESSFULLY DELETED')
return
def licart(self, message):
"""returns a list of cars. Sortable by code or name
"""
if self.show_access(message, 'LICART (sort[0/1])', 'R') != 0:
return
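        # Example: LICART 1 lists all car types sorted by name; LICART 0;BOX
        # (class code hypothetical) restricts the list to car class BOX.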
        #sort order
value, rc = self.extract_field(message, 0, '')
if rc == 0:
sort_order = value
else:
sort_order = ''
#class
value, rc = self.extract_field(message, 1, '')
if rc == 0:
class_filter = value
else:
class_filter = ''
# build the column titles
class_name = 80 - self.cartsize - 2 * self.loadsize - self.classize - 16 - 8
if class_name > 30:
class_name = 30
titles = self.x_field('TYPE======', self.cartsize) + ' ' +\
self.x_field('NAME==========================', class_name) + ' ' +\
self.x_field('LNGTH', 5) + ' ' +\
self.x_field('CAPTY', 5) + ' ' +\
self.x_field('U/WT=', 5) + ' ' +\
self.x_field('O', 1) + ' ' +\
self.x_field('LOADING===', self.loadsize) + ' ' +\
self.x_field('UNLOADING=', self.loadsize) + ' ' +\
self.x_field('CLASS=====', self.classize)
# get the extract data
if sort_order == '1':
sql = 'select cartype, name, length, oper_mode, capacity, unladen_weight, loading, ' +\
'unloading, carclass from cartype order by name'
else:
sql = 'select cartype, name, length, oper_mode, capacity, unladen_weight, loading, ' +\
'unloading, carclass from cartype order by carclass'
count, ds_cartype = self.db_read(sql, '')
if count < 0:
return
#report the extracted data
line_count = 0
records = 0
for row in ds_cartype:
car_class = row[8]
if line_count == 0:
print(titles)
if class_filter == '' or class_filter == car_class:
print(self.x_field(row[0], self.cartsize) + " " +
self.x_field(row[1], class_name) + " " +
self.x_field(row[2], 5, 'R') + " " +
self.x_field(row[4], 5, 'R') + " " +
self.x_field(row[5], 5, 'R') + " " +
self.x_field(row[3], 1) + " " +
self.x_field(row[6], self.loadsize) + " " +
self.x_field(row[7], self.loadsize) + " " +
self.x_field(row[8], self.classize))
records = records + 1
line_count = line_count + 1
if line_count > 20:
line_count = 0
reply = raw_input('+')
if reply == 'x':
break
print(' ** END OF DATA:' + str(records) + ' RECORDS DISPLAYED **')
return
def prcart(self, message, Params):
"""prints a list of cars. Sortable by code or name
"""
if self.show_access(message, 'PRCART (sort[0/1])', 'R') != 0:
return
        #sort order
value, rc = self.extract_field(message, 0, '')
if rc == 0:
sort_order = value
else:
sort_order = ''
#class
value, rc = self.extract_field(message, 1, '')
if rc == 0:
class_filter = value
else:
class_filter = ''
# build the column titles
class_name = 80 - self.cartsize - 2 * self.loadsize - self.classize - 16 - 8
if class_name > 30:
class_name = 30
titles = self.x_field('TYPE======', self.cartsize) + ' ' +\
self.x_field('NAME==========================', class_name) + ' ' +\
self.x_field('LNGTH', 5) + ' ' +\
self.x_field('CAPTY', 5) + ' ' +\
self.x_field('U/WT=', 5) + ' ' +\
self.x_field('O', 1) + ' ' +\
self.x_field('LOADING===', self.loadsize) + ' ' +\
self.x_field('UNLOADING=', self.loadsize) + ' ' +\
self.x_field('CLASS=====', self.classize)
# get the extract data
if sort_order == '1':
sql = 'select cartype, name, length, oper_mode, capacity, unladen_weight, loading, ' +\
'unloading, carclass from cartype order by name'
else:
sql = 'select cartype, name, length, oper_mode, capacity, unladen_weight, loading, ' +\
'unloading, carclass from cartype order by carclass'
count, ds_cartype = self.db_read(sql, '')
if count < 0:
return
        #build the report lines from the extracted data
self.temp = {}
for row in ds_cartype:
car_class = row[8]
if class_filter == '' or class_filter == car_class:
print_line = self.x_field(row[0], self.cartsize) + ' ' +\
self.x_field(row[1], class_name) + ' ' +\
self.x_field(row[2], 5, 'R') + ' ' +\
self.x_field(row[4], 5, 'R') + ' ' +\
self.x_field(row[5], 5, 'R') + ' ' +\
self.x_field(row[3], 1) + ' ' +\
self.x_field(row[6], self.loadsize) + ' ' +\
self.x_field(row[7], self.loadsize) + ' ' +\
self.x_field(row[8], self.classize)
if sort_order == '1':
self.temp[row[1]] = print_line
else:
self.temp[row[0]] = print_line
#report the extracted data
self.print_report (titles = titles,
report_id = 'PRCART',
report_name = 'LIST OF CAR TYPES',
Params = Params)
return
| avg_line_length: 41.234155 | max_line_length: 106 | alphanum_fraction: 0.450792 |

| hexsha: 9a206e93eff36794daf68c2e9e830dfbe8071dac | size: 6612 | ext: py | lang: Python |
| path: release/stubs.min/System/Diagnostics/__init__.py | repo: tranconbv/ironpython-stubs | head: a601759e6c6819beff8e6b639d18a24b7e351851 | licenses: ["MIT"] |
| stars: null | issues: null | forks: null |
# encoding: utf-8
# module System.Diagnostics calls itself Diagnostics
# from mscorlib,Version=4.0.0.0,Culture=neutral,PublicKeyToken=b77a5c561934e089,System,Version=4.0.0.0,Culture=neutral,PublicKeyToken=b77a5c561934e089
# by generator 1.145
# no doc
# no important
# no functions
# classes
from __init___parts.Switch import Switch
from __init___parts.BooleanSwitch import BooleanSwitch
from __init___parts.ConditionalAttribute import ConditionalAttribute
from __init___parts.TraceListener import TraceListener
from __init___parts.TextWriterTraceListener import TextWriterTraceListener
from __init___parts.ConsoleTraceListener import ConsoleTraceListener
from __init___parts.CorrelationManager import CorrelationManager
from __init___parts.CounterCreationData import CounterCreationData
from __init___parts.CounterCreationDataCollection import CounterCreationDataCollection
from __init___parts.CounterSample import CounterSample
from __init___parts.CounterSampleCalculator import CounterSampleCalculator
from __init___parts.DataReceivedEventArgs import DataReceivedEventArgs
from __init___parts.DataReceivedEventHandler import DataReceivedEventHandler
from __init___parts.Debug import Debug
from __init___parts.DebuggableAttribute import DebuggableAttribute
from __init___parts.Debugger import Debugger
from __init___parts.DebuggerBrowsableAttribute import DebuggerBrowsableAttribute
from __init___parts.DebuggerBrowsableState import DebuggerBrowsableState
from __init___parts.DebuggerDisplayAttribute import DebuggerDisplayAttribute
from __init___parts.DebuggerHiddenAttribute import DebuggerHiddenAttribute
from __init___parts.DebuggerNonUserCodeAttribute import DebuggerNonUserCodeAttribute
from __init___parts.DebuggerStepperBoundaryAttribute import DebuggerStepperBoundaryAttribute
from __init___parts.DebuggerStepThroughAttribute import DebuggerStepThroughAttribute
from __init___parts.DebuggerTypeProxyAttribute import DebuggerTypeProxyAttribute
from __init___parts.DebuggerVisualizerAttribute import DebuggerVisualizerAttribute
from __init___parts.DefaultTraceListener import DefaultTraceListener
from __init___parts.DelimitedListTraceListener import DelimitedListTraceListener
from __init___parts.DiagnosticsConfigurationHandler import DiagnosticsConfigurationHandler
from __init___parts.EntryWrittenEventArgs import EntryWrittenEventArgs
from __init___parts.EntryWrittenEventHandler import EntryWrittenEventHandler
from __init___parts.EventInstance import EventInstance
from __init___parts.EventLog import EventLog
from __init___parts.EventLogEntry import EventLogEntry
from __init___parts.EventLogEntryCollection import EventLogEntryCollection
from __init___parts.EventLogEntryType import EventLogEntryType
from __init___parts.EventLogPermission import EventLogPermission
from __init___parts.EventLogPermissionAccess import EventLogPermissionAccess
from __init___parts.EventLogPermissionAttribute import EventLogPermissionAttribute
from __init___parts.EventLogPermissionEntry import EventLogPermissionEntry
from __init___parts.EventLogPermissionEntryCollection import EventLogPermissionEntryCollection
from __init___parts.EventLogTraceListener import EventLogTraceListener
from __init___parts.EventSourceCreationData import EventSourceCreationData
from __init___parts.TraceFilter import TraceFilter
from __init___parts.EventTypeFilter import EventTypeFilter
from __init___parts.FileVersionInfo import FileVersionInfo
from __init___parts.ICollectData import ICollectData
from __init___parts.InstanceData import InstanceData
from __init___parts.InstanceDataCollection import InstanceDataCollection
from __init___parts.InstanceDataCollectionCollection import InstanceDataCollectionCollection
from __init___parts.MonitoringDescriptionAttribute import MonitoringDescriptionAttribute
from __init___parts.OverflowAction import OverflowAction
from __init___parts.PerformanceCounter import PerformanceCounter
from __init___parts.PerformanceCounterCategory import PerformanceCounterCategory
from __init___parts.PerformanceCounterCategoryType import PerformanceCounterCategoryType
from __init___parts.PerformanceCounterInstanceLifetime import PerformanceCounterInstanceLifetime
from __init___parts.PerformanceCounterManager import PerformanceCounterManager
from __init___parts.PerformanceCounterPermission import PerformanceCounterPermission
from __init___parts.PerformanceCounterPermissionAccess import PerformanceCounterPermissionAccess
from __init___parts.PerformanceCounterPermissionAttribute import PerformanceCounterPermissionAttribute
from __init___parts.PerformanceCounterPermissionEntry import PerformanceCounterPermissionEntry
from __init___parts.PerformanceCounterPermissionEntryCollection import PerformanceCounterPermissionEntryCollection
from __init___parts.PerformanceCounterType import PerformanceCounterType
from __init___parts.Process import Process
from __init___parts.ProcessModule import ProcessModule
from __init___parts.ProcessModuleCollection import ProcessModuleCollection
from __init___parts.ProcessPriorityClass import ProcessPriorityClass
from __init___parts.ProcessStartInfo import ProcessStartInfo
from __init___parts.ProcessThread import ProcessThread
from __init___parts.ProcessThreadCollection import ProcessThreadCollection
from __init___parts.ProcessWindowStyle import ProcessWindowStyle
from __init___parts.SourceFilter import SourceFilter
from __init___parts.SourceLevels import SourceLevels
from __init___parts.SourceSwitch import SourceSwitch
from __init___parts.StackFrame import StackFrame
from __init___parts.StackFrameExtensions import StackFrameExtensions
from __init___parts.StackTrace import StackTrace
from __init___parts.Stopwatch import Stopwatch
from __init___parts.SwitchAttribute import SwitchAttribute
from __init___parts.SwitchLevelAttribute import SwitchLevelAttribute
from __init___parts.ThreadPriorityLevel import ThreadPriorityLevel
from __init___parts.ThreadState import ThreadState
from __init___parts.ThreadWaitReason import ThreadWaitReason
from __init___parts.Trace import Trace
from __init___parts.TraceEventCache import TraceEventCache
from __init___parts.TraceEventType import TraceEventType
from __init___parts.TraceLevel import TraceLevel
from __init___parts.TraceListenerCollection import TraceListenerCollection
from __init___parts.TraceOptions import TraceOptions
from __init___parts.TraceSource import TraceSource
from __init___parts.TraceSwitch import TraceSwitch
from __init___parts.XmlWriterTraceListener import XmlWriterTraceListener
| avg_line_length: 65.465347 | max_line_length: 151 | alphanum_fraction: 0.907139 |

| hexsha: a8e3f1532e4b59940e70617091eb99c94471de1e | size: 2177 | ext: py | lang: Python |
| path: pubsub/messages.py | repo: raymondpoling/rerun-tv | head: e10122ecad12f8ec427c28317db018be57548f60 | licenses: ["MIT"] |
| stars: null | issues: 26 (2020-03-21T18:26:18.000Z to 2020-06-23T13:33:01.000Z) | forks: null |
#!/usr/bin/python3.6
'''Main application: receives messages from redis and dispatches the requested test to run.'''
import os
import sys
import traceback
import json
import redis
import schedule
import root_locations
import remote_locations
import series_playlists
import playlist_check
import ensure_tags
REDIS_SERVER = os.environ.get("REDIS_SERVER")
REDIS_PORT = os.environ.get("REDIS_PORT")
def create_redis():
'''Create redis connection.'''
return redis.Redis(host=REDIS_SERVER, port=REDIS_PORT)
def subscribe(redis_conn):
'''Subscribe to exception.'''
pubsub = redis_conn.pubsub()
pubsub.subscribe("exception")
return pubsub
def select_test(name):
'''Select the test to run.'''
return {
schedule.TEST: schedule.run,
root_locations.TEST: root_locations.run,
remote_locations.TEST: remote_locations.run,
series_playlists.TEST: series_playlists.run,
playlist_check.TEST: playlist_check.run,
ensure_tags.TEST: ensure_tags.run
}.get(name, lambda x: print("Test not found: " + name +
"\n\targs: " + str(x)))
def main():
'''Run the application. Get a message from redis, and run test with
args.'''
redis_conn = create_redis()
pubsub = subscribe(redis_conn)
for new_message in pubsub.listen():
print("message: " + str(new_message))
message = {}
try:
message = new_message['data'].decode('utf-8')
j = json.loads(message)
test = j['test']
arguments = j['args']
print("testing: " + str(test) + " args: " + str(arguments))
selected = select_test(test)
print("selected? " + str(selected))
selected(arguments)
print("??? " + str(j), flush=True)
except BaseException:
info = sys.exc_info()
print("Cannot process: " + str(new_message['data']) +
"\n\t" + str(info[0]))
traceback.print_exception(info[0], info[1], info[2],
file=sys.stdout)
finally:
print("", flush=True)
if __name__ == "__main__":
main()
| avg_line_length: 30.661972 | max_line_length: 79 | alphanum_fraction: 0.604042 |

| hexsha: 23aaf45845cec857d9f323a7a0a4d4d64d3c5d4d | size: 1452 | ext: py | lang: Python |
| path: app/views/crops.py | repo: aldrinjao/sarai-interactive-maps-backend | head: ebe8ec54d90c3cd2701c9fd80abffc0f73404bf0 | licenses: ["MIT"] |
| stars: null | issues: null | forks: null |
# crops.py
#
# Copyright(c) Exequiel Ceasar Navarrete <esnavarrete1@up.edu.ph>
# Licensed under MIT
# Version 1.0.0-alpha6
from flask import Blueprint, jsonify, abort
from flask_cors import cross_origin
from app.gzipped import gzipped
from app.models import Crop
from app.schema import CropSchema
mod = Blueprint('crops', __name__, url_prefix='/crops')
@mod.route('/', methods=['GET'])
@gzipped
@cross_origin()
def index():
crop = Crop.query.all()
response = {
'success': True
}
crop_schema = CropSchema(many=True)
result = crop_schema.dump(crop)
response['result'] = result.data
return jsonify(response)
@mod.route('/<crop_id>', methods=['GET'])
@gzipped
@cross_origin()
def by_id(crop_id):
crop = Crop.query.get(crop_id)
response = {
'success': True
}
# invoke the page not found handler when crop is not found
if crop is None:
abort(404, 'Crop not found')
crop_schema = CropSchema()
result = crop_schema.dump(crop)
response['result'] = result.data
return jsonify(response)
@mod.route('/slug/<slug>', methods=['GET'])
@gzipped
@cross_origin()
def by_slug(slug):
crop = Crop.query.filter_by(slug=slug).first()
response = {
'success': True
}
# invoke the page not found handler when crop is not found
if crop is None:
abort(404, 'Crop not found')
crop_schema = CropSchema()
result = crop_schema.dump(crop)
response['result'] = result.data
return jsonify(response)
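# Example responses, assuming the blueprint is registered on a Flask app
# (slug value hypothetical):
#   GET /crops/           -> {"success": true, "result": [...]}
#   GET /crops/1          -> {"success": true, "result": {...}}
#   GET /crops/slug/rice  -> {"success": true, "result": {...}}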
| avg_line_length: 20.166667 | max_line_length: 65 | alphanum_fraction: 0.694904 |

| hexsha: 6ffe55a04cc3bd5b3938ea8512f5ec05886656a4 | size: 720 | ext: py | lang: Python |
| path: cert_issuer/__main__.py | repo: bloxberg-org/cert-issuer | head: 5dbdefaa8423ebf8dffb95c6492fc30ddebfd335 | licenses: ["MIT"] |
| stars: null | issues: null | forks: 1 (2021-01-14T10:40:31.000Z to 2021-01-14T10:40:31.000Z) |
#!/usr/bin/env python3
import os.path
import sys
import cProfile
PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if __package__ is None and not hasattr(sys, 'frozen'):
path = os.path.realpath(os.path.abspath(__file__))
sys.path.insert(0, os.path.dirname(os.path.dirname(path)))
def cert_issuer_main(args=None):
from cert_issuer import config
parsed_config = config.get_config()
from cert_issuer import issue_certificates
pr = cProfile.Profile()
pr.enable()
issue_certificates.main(parsed_config)
pr.disable()
    # after the issuing run completes, report and persist the profiling stats
pr.print_stats(sort="tottime")
pr.dump_stats('profile.pstat')
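    # The dumped stats can be inspected offline, for example with:
    #   python -c "import pstats; pstats.Stats('profile.pstat').sort_stats('tottime').print_stats(20)"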
if __name__ == '__main__':
cert_issuer_main()
| avg_line_length: 25.714286 | max_line_length: 66 | alphanum_fraction: 0.720833 |

| hexsha: 5ee69da5489329bd879c3003ba3f808fc5de226a | size: 3420 | ext: py | lang: Python |
| path: build_msvc/msvc-autogen.py | repo: cisnes/PINECOIN | head: a0252cace17ecc1208a07368c0b893d3878459d8 | licenses: ["MIT"] |
| stars: null | issues: null | forks: null |
#!/usr/bin/env python3
import os
import re
import argparse
from shutil import copyfile
SOURCE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'src'))
DEFAULT_PLATFORM_TOOLSET = R'v141'
libs = [
'libpinecoin_cli',
'libpinecoin_common',
'libpinecoin_crypto',
'libpinecoin_server',
'libpinecoin_util',
'libpinecoin_wallet_tool',
'libpinecoin_wallet',
'libpinecoin_zmq',
]
ignore_list = [
]
lib_sources = {}
def parse_makefile(makefile):
with open(makefile, 'r', encoding='utf-8') as file:
current_lib = ''
for line in file.read().splitlines():
if current_lib:
source = line.split()[0]
if source.endswith('.cpp') and not source.startswith('$') and source not in ignore_list:
source_filename = source.replace('/', '\\')
object_filename = source.replace('/', '_')[:-4] + ".obj"
lib_sources[current_lib].append((source_filename, object_filename))
if not line.endswith('\\'):
current_lib = ''
continue
for lib in libs:
_lib = lib.replace('-', '_')
if re.search(_lib + '.*_SOURCES \\= \\\\', line):
current_lib = lib
lib_sources[current_lib] = []
break
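# parse_makefile() collects .cpp sources from automake-style lists such as
# this hypothetical excerpt:
#   libpinecoin_util_a_SOURCES = \
#     util/system.cpp \
#     util/strencodings.cpp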
def set_common_properties(toolset):
with open(os.path.join(SOURCE_DIR, '../build_msvc/common.init.vcxproj'), 'r', encoding='utf-8') as rfile:
s = rfile.read()
s = re.sub('<PlatformToolset>.*?</PlatformToolset>', '<PlatformToolset>'+toolset+'</PlatformToolset>', s)
with open(os.path.join(SOURCE_DIR, '../build_msvc/common.init.vcxproj'), 'w', encoding='utf-8',newline='\n') as wfile:
wfile.write(s)
def main():
parser = argparse.ArgumentParser(description='PineCoin-core msbuild configuration initialiser.')
parser.add_argument('-toolset', nargs='?',help='Optionally sets the msbuild platform toolset, e.g. v142 for Visual Studio 2019.'
' default is %s.'%DEFAULT_PLATFORM_TOOLSET)
args = parser.parse_args()
if args.toolset:
set_common_properties(args.toolset)
for makefile_name in os.listdir(SOURCE_DIR):
if 'Makefile' in makefile_name:
parse_makefile(os.path.join(SOURCE_DIR, makefile_name))
for key, value in lib_sources.items():
vcxproj_filename = os.path.abspath(os.path.join(os.path.dirname(__file__), key, key + '.vcxproj'))
content = ''
for source_filename, object_filename in value:
content += ' <ClCompile Include="..\\..\\src\\' + source_filename + '">\n'
content += ' <ObjectFileName>$(IntDir)' + object_filename + '</ObjectFileName>\n'
content += ' </ClCompile>\n'
with open(vcxproj_filename + '.in', 'r', encoding='utf-8') as vcxproj_in_file:
with open(vcxproj_filename, 'w', encoding='utf-8') as vcxproj_file:
vcxproj_file.write(vcxproj_in_file.read().replace(
'@SOURCE_FILES@\n', content))
copyfile(os.path.join(SOURCE_DIR,'../build_msvc/pinecoin_config.h'), os.path.join(SOURCE_DIR, 'config/pinecoin-config.h'))
copyfile(os.path.join(SOURCE_DIR,'../build_msvc/libsecp256k1_config.h'), os.path.join(SOURCE_DIR, 'secp256k1/src/libsecp256k1-config.h'))
if __name__ == '__main__':
main()
| avg_line_length: 41.707317 | max_line_length: 141 | alphanum_fraction: 0.615497 |

| hexsha: 9b4adb447ae02f3cb45509daa402f87c869cdfb4 | size: 430 | ext: py | lang: Python |
| path: tests/strategies/migrations.py | repo: sc-gcoste/brightway2-io | head: 3a1f6d4efeb1b1754343ca7a243c4c1bcaaca40a | licenses: ["BSD-3-Clause"] |
| stars: null | issues: 3 (2020-03-10T11:08:18.000Z to 2020-03-10T11:09:00.000Z) |
| forks: null | forks repo: brightway-lca/brightway2-io-copy | forks head: 8383adc2f0cb06852f689fb2aab62d5a29f41130 |
import pytest
from bw2data.tests import bw2test
from bw2io.errors import MissingMigration
from bw2io.strategies import migrate_datasets, migrate_exchanges
@bw2test
def test_migrate_exchanges_missing_migration():
with pytest.raises(MissingMigration):
migrate_exchanges([], "foo")
@bw2test
def test_migrate_datasets_missing_migration():
with pytest.raises(MissingMigration):
migrate_datasets([], "foo")
| avg_line_length: 23.888889 | max_line_length: 64 | alphanum_fraction: 0.786047 |

| hexsha: 75444c93fa48a93071927879cd5f087148fce54e | size: 917 | ext: py | lang: Python |
| path: powercmd/commands_dict.py | repo: dextero/powercmd | head: 6d3652e9d1a60d7227e95ce943a9d3a6ec6a25bf | licenses: ["MIT"] |
| stars: null | issues: 8 (2017-06-13T15:27:09.000Z to 2020-08-19T19:11:08.000Z) | forks: 4 (2017-06-13T15:01:10.000Z to 2020-08-05T10:00:20.000Z) |
"""
Command name -> Command object dictionary able to choose most appropriate
command by partial name.
"""
from powercmd.command import Command
from powercmd.exceptions import InvalidInput
from powercmd.match_string import match_string
class CommandsDict(dict):
"""
A container for Command objects that allows accessing them by name.
Functionally, Mapping[str, Command].
"""
def choose(self,
short_cmd: str,
verbose: bool = False) -> Command:
"""Returns a command handler that matches SHORT_CMD."""
matches = match_string(short_cmd, self, verbose=verbose)
if not matches:
raise InvalidInput('no such command: %s' % (short_cmd,))
if len(matches) > 1:
            raise InvalidInput('ambiguous command: %s (possible: %s)'
% (short_cmd, ' '.join(matches)))
return self[matches[0]]
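# Example usage (handler names hypothetical):
#   cmds = CommandsDict(connect=connect_cmd, configure=configure_cmd)
#   cmds.choose('conn')  # -> connect_cmd
#   cmds.choose('c')     # raises InvalidInput: ambiguous command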
| avg_line_length: 30.566667 | max_line_length: 73 | alphanum_fraction: 0.635769 |

| hexsha: 4aecd6240556e59d090cc002c5fa8592212d7805 | size: 2094 | ext: py | lang: Python |
| path: tap_github/tests/fixtures.py | repo: oviohub/tap-github-1 | head: 40edd0536a4080a3d9a67cc264282d76e4bc1c46 | licenses: ["Apache-2.0"] |
| stars: 5 (2021-09-03T23:06:35.000Z to 2022-01-23T15:05:02.000Z) | issues: 91 (2021-09-03T21:04:37.000Z to 2022-03-31T16:37:02.000Z) |
| forks: 3 (2021-09-10T09:28:04.000Z to 2021-12-09T20:43:20.000Z) | forks repo: oviohub/tap-github-meltano |
import datetime
import pytest
@pytest.fixture
def search_config():
return {
"metrics_log_level": "none",
"start_date": datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d"),
"searches": [
{
"name": "tap_something",
"query": "tap-+language:Python",
}
],
}
@pytest.fixture
def repo_list_config(request):
"""
Get a default list of repos or pass your own by decorating your test with
@pytest.mark.repo_list(['org1/repo1', 'org2/repo2'])
"""
marker = request.node.get_closest_marker("repo_list")
if marker is None:
repo_list = ["octocat/hello-world", "mapswipe/mapswipe"]
else:
repo_list = marker.args[0]
return {
"metrics_log_level": "none",
"start_date": datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d"),
"repositories": repo_list,
}
@pytest.fixture
def usernames_list_config(request):
"""
Get a default list of usernames or pass your own by decorating your test with
@pytest.mark.usernames_list(['ericboucher', 'aaronsteers'])
"""
marker = request.node.get_closest_marker("usernames_list")
if marker is None:
usernames_list = ["ericboucher", "aaronsteers"]
else:
usernames_list = marker.args[0]
return {
"metrics_log_level": "none",
"start_date": datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d"),
"user_usernames": usernames_list,
}
@pytest.fixture
def user_ids_list_config(request):
"""
    Get a default list of user ids or pass your own by decorating your test with
    @pytest.mark.user_ids_list([1, 2])
"""
marker = request.node.get_closest_marker("user_ids_list")
if marker is None:
user_ids_list = [1, 2]
else:
user_ids_list = marker.args[0]
return {
"metrics_log_level": "none",
"start_date": datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d"),
"user_ids": user_ids_list,
}
| avg_line_length: 27.92 | max_line_length: 88 | alphanum_fraction: 0.626552 |

| hexsha: 1750b34adbea5a01029f5daacea4d3ab39e89c0c | size: 3708 | ext: py | lang: Python |
| path: aiida/backends/sqlalchemy/alembic_manage.py | repo: tomzhang/aiida_core | head: 949810e9f3daff0f748c5c9aa1dde4f5222bb49b | licenses: ["BSD-2-Clause"] |
| stars: 1 (2019-04-29T12:39:31.000Z to 2019-04-29T12:39:31.000Z) | issues: null | forks: null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###########################################################################
# Copyright (c), The AiiDA team. All rights reserved. #
# This file is part of the AiiDA code. #
# #
# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core #
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################
from __future__ import absolute_import
from __future__ import print_function
import sys
# Available alembic commands
REVISION_CMD = 'revision'
CURRENT_CMD = 'current'
HISTORY_CMD = 'history'
UPGRADE_CMD = 'upgrade'
DOWNGRADE_CMD = 'downgrade'
AVAIL_AL_COMMANDS = [REVISION_CMD, CURRENT_CMD, HISTORY_CMD,
UPGRADE_CMD, DOWNGRADE_CMD]
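# Example invocations (assuming a configured SQLAlchemy-backed profile):
#   python alembic_manage.py --aiida-profile <profile> history verbose
#   python alembic_manage.py --aiida-profile <profile> upgrade head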
if __name__ == "__main__":
import argparse
from aiida.backends.sqlalchemy.utils import alembic_command
from aiida.backends.profile import load_profile
from aiida.backends.sqlalchemy.utils import _load_dbenv_noschemacheck
from aiida.backends.profile import BACKEND_SQLA
from aiida.common.exceptions import InvalidOperation
parser = argparse.ArgumentParser()
parser.add_argument(
'--aiida-profile', help='The AiiDA profile that you would like to use')
subparsers = parser.add_subparsers(
help='sub-command help', dest='command')
parser_upg = subparsers.add_parser(
'upgrade', help='Upgrade to a later version')
parser_upg.add_argument(
'arguments', choices=['head'], help='Upgrade to head')
parser_dg = subparsers.add_parser(
'downgrade', help='Revert to a previous version')
parser_dg.add_argument(
'arguments', choices=['base'], help='Revert to base')
parser_hist = subparsers.add_parser(
'history', help='List changeset scripts in chronological order')
parser_hist.add_argument(
'arguments', choices=['verbose'], nargs='?',
help='Output in verbose mode')
parser_cur = subparsers.add_parser(
'current', help='Display the current version for a database')
parser_cur.add_argument(
'arguments', choices=['verbose'], nargs='?',
help='Output in verbose mode')
parser_rev = subparsers.add_parser(
'revision', help='Create a new migration file')
parser_rev.add_argument(
'arguments', nargs=1, help='Migration message')
args = parser.parse_args(sys.argv[1:])
if args.command in AVAIL_AL_COMMANDS:
# Use the default profile if not specified
profile_name = args.aiida_profile
# Perform the same loading procedure as the normal load_dbenv does
from aiida.backends import settings
settings.LOAD_DBENV_CALLED = True
# We load the needed profile.
# This is going to set global variables in settings, including
# settings.BACKEND
load_profile(profile=profile_name)
if settings.BACKEND != BACKEND_SQLA:
raise InvalidOperation("A SQLAlchemy (alembic) revision "
"generation procedure is initiated "
"but a different backend is used!")
_load_dbenv_noschemacheck(profile=profile_name)
if 'arguments' in args:
alembic_command(args.command, args.arguments)
else:
alembic_command(args.command)
else:
print("No valid command specified. The available commands are: " + str(AVAIL_AL_COMMANDS))
| avg_line_length: 40.304348 | max_line_length: 98 | alphanum_fraction: 0.627023 |

| hexsha: 25a17b1ea1c5f307e4bc931c973ab1c54677da77 | size: 1905 | ext: py | lang: Python |
| path: editor/ui/layerwidget.py | repo: JCash/fontcreator | head: 05297452ee18cf574988471e3cca8ddf0681d6e0 | licenses: ["Zlib"] |
| stars: 5 (2016-07-27T10:52:53.000Z to 2021-02-22T16:03:49.000Z) | issues: null | forks: 2 (2015-01-25T09:56:03.000Z to 2016-07-27T10:52:56.000Z) |
from PySide import QtGui
import toolbar
import propertyview
from properties.propertyclass import property_class
from properties.propertytypes import IntProperty
def lerp(a, b, t):
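    # Linear interpolation: returns a at t == 0.0 and b at t == 1.0.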
return (t * b) + ((1.0 - t) * a)
class LayerWidget(QtGui.QWidget):
def __init__(self, parent=None):
super(LayerWidget, self).__init__(parent=parent)
        self.setMinimumWidth(240)
layout = QtGui.QVBoxLayout()
layout.setContentsMargins(2,2,2,2)
self.Toolbar = toolbar.Toolbar(self)
bn = QtGui.QToolButton(self)
bn.setAutoRaise(True)
bn.setIcon(QtGui.QIcon("icons:/additive.png"))
bn.clicked.connect(self.OnAddLayer)
self.Toolbar.AddWidget(bn)
self.PropertyView = propertyview.PropertyView()
@property_class()
class TestObject(object):
int0 = IntProperty(0)
int1 = IntProperty(1)
int2 = IntProperty(2)
int3 = IntProperty(3)
self.testobject = TestObject()
layout.addWidget(self.Toolbar)
layout.addWidget(self.PropertyView)
self.PropertyView.Populate([self.testobject])
layout.addStretch()
self.setLayout(layout)
def OnAddLayer(self):
pass
def AddWidget(self):
pass
def paintEvent(self, ev):
p = QtGui.QPainter(self)
c1 = self.palette().color(QtGui.QPalette.Mid)
c2 = self.palette().color(QtGui.QPalette.Window)
t = 0.75
c = QtGui.QColor.fromRgb(lerp(c1.red(), c2.red(), t), lerp(c1.green(), c2.green(), t), lerp(c1.blue(), c2.blue(), t))
p.fillRect(1,1,self.width()-2,self.height()-2, c)
super(LayerWidget, self).paintEvent(ev)
| avg_line_length: 29.307692 | max_line_length: 126 | alphanum_fraction: 0.560105 |

| hexsha: 1865fd589b1ef29e0408658d3a1fa3d872ab8cc5 | size: 42987 | ext: py | lang: Python |
| path: desktop/core/ext-py/SQLAlchemy-1.3.16/test/dialect/mssql/test_compiler.py | repo: e11it/hue-1 | head: 436704c40b5fa6ffd30bd972bf50ffeec738d091 | licenses: ["Apache-2.0"] |
| stars: 5079 (2015-01-01T03:39:46.000Z to 2022-03-31T07:38:22.000Z) | issues: 1623 (2015-01-01T08:06:24.000Z to 2022-03-30T19:48:52.000Z) | forks: 2033 (2015-01-04T07:18:02.000Z to 2022-03-28T19:55:47.000Z) |
# -*- encoding: utf-8
from sqlalchemy import Column
from sqlalchemy import Computed
from sqlalchemy import delete
from sqlalchemy import extract
from sqlalchemy import func
from sqlalchemy import Index
from sqlalchemy import insert
from sqlalchemy import Integer
from sqlalchemy import literal
from sqlalchemy import literal_column
from sqlalchemy import MetaData
from sqlalchemy import PrimaryKeyConstraint
from sqlalchemy import schema
from sqlalchemy import select
from sqlalchemy import Sequence
from sqlalchemy import sql
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import testing
from sqlalchemy import union
from sqlalchemy import UniqueConstraint
from sqlalchemy import update
from sqlalchemy.dialects import mssql
from sqlalchemy.dialects.mssql import mxodbc
from sqlalchemy.dialects.mssql.base import try_cast
from sqlalchemy.sql import column
from sqlalchemy.sql import quoted_name
from sqlalchemy.sql import table
from sqlalchemy.testing import AssertsCompiledSQL
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import is_
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = mssql.dialect()
def test_true_false(self):
self.assert_compile(sql.false(), "0")
self.assert_compile(sql.true(), "1")
def test_select(self):
t = table("sometable", column("somecolumn"))
self.assert_compile(
t.select(), "SELECT sometable.somecolumn FROM sometable"
)
def test_select_with_nolock(self):
t = table("sometable", column("somecolumn"))
self.assert_compile(
t.select().with_hint(t, "WITH (NOLOCK)"),
"SELECT sometable.somecolumn FROM sometable WITH (NOLOCK)",
)
def test_select_with_nolock_schema(self):
m = MetaData()
t = Table(
"sometable", m, Column("somecolumn", Integer), schema="test_schema"
)
self.assert_compile(
t.select().with_hint(t, "WITH (NOLOCK)"),
"SELECT test_schema.sometable.somecolumn "
"FROM test_schema.sometable WITH (NOLOCK)",
)
def test_select_w_order_by_collate(self):
m = MetaData()
t = Table("sometable", m, Column("somecolumn", String))
self.assert_compile(
select([t]).order_by(
t.c.somecolumn.collate("Latin1_General_CS_AS_KS_WS_CI").asc()
),
"SELECT sometable.somecolumn FROM sometable "
"ORDER BY sometable.somecolumn COLLATE "
"Latin1_General_CS_AS_KS_WS_CI ASC",
)
def test_join_with_hint(self):
t1 = table(
"t1",
column("a", Integer),
column("b", String),
column("c", String),
)
t2 = table(
"t2",
column("a", Integer),
column("b", Integer),
column("c", Integer),
)
join = (
t1.join(t2, t1.c.a == t2.c.a)
.select()
.with_hint(t1, "WITH (NOLOCK)")
)
self.assert_compile(
join,
"SELECT t1.a, t1.b, t1.c, t2.a, t2.b, t2.c "
"FROM t1 WITH (NOLOCK) JOIN t2 ON t1.a = t2.a",
)
def test_insert(self):
t = table("sometable", column("somecolumn"))
self.assert_compile(
t.insert(),
"INSERT INTO sometable (somecolumn) VALUES " "(:somecolumn)",
)
def test_update(self):
t = table("sometable", column("somecolumn"))
self.assert_compile(
t.update(t.c.somecolumn == 7),
"UPDATE sometable SET somecolumn=:somecolum"
"n WHERE sometable.somecolumn = "
":somecolumn_1",
dict(somecolumn=10),
)
def test_insert_hint(self):
t = table("sometable", column("somecolumn"))
for targ in (None, t):
for darg in ("*", "mssql"):
self.assert_compile(
t.insert()
.values(somecolumn="x")
.with_hint(
"WITH (PAGLOCK)", selectable=targ, dialect_name=darg
),
"INSERT INTO sometable WITH (PAGLOCK) "
"(somecolumn) VALUES (:somecolumn)",
)
def test_update_hint(self):
t = table("sometable", column("somecolumn"))
for targ in (None, t):
for darg in ("*", "mssql"):
self.assert_compile(
t.update()
.where(t.c.somecolumn == "q")
.values(somecolumn="x")
.with_hint(
"WITH (PAGLOCK)", selectable=targ, dialect_name=darg
),
"UPDATE sometable WITH (PAGLOCK) "
"SET somecolumn=:somecolumn "
"WHERE sometable.somecolumn = :somecolumn_1",
)
def test_update_exclude_hint(self):
t = table("sometable", column("somecolumn"))
self.assert_compile(
t.update()
.where(t.c.somecolumn == "q")
.values(somecolumn="x")
.with_hint("XYZ", "mysql"),
"UPDATE sometable SET somecolumn=:somecolumn "
"WHERE sometable.somecolumn = :somecolumn_1",
)
def test_delete_hint(self):
t = table("sometable", column("somecolumn"))
for targ in (None, t):
for darg in ("*", "mssql"):
self.assert_compile(
t.delete()
.where(t.c.somecolumn == "q")
.with_hint(
"WITH (PAGLOCK)", selectable=targ, dialect_name=darg
),
"DELETE FROM sometable WITH (PAGLOCK) "
"WHERE sometable.somecolumn = :somecolumn_1",
)
def test_delete_exclude_hint(self):
t = table("sometable", column("somecolumn"))
self.assert_compile(
t.delete()
.where(t.c.somecolumn == "q")
.with_hint("XYZ", dialect_name="mysql"),
"DELETE FROM sometable WHERE "
"sometable.somecolumn = :somecolumn_1",
)
def test_delete_extra_froms(self):
t1 = table("t1", column("c1"))
t2 = table("t2", column("c1"))
q = sql.delete(t1).where(t1.c.c1 == t2.c.c1)
self.assert_compile(
q, "DELETE FROM t1 FROM t1, t2 WHERE t1.c1 = t2.c1"
)
def test_delete_extra_froms_alias(self):
a1 = table("t1", column("c1")).alias("a1")
t2 = table("t2", column("c1"))
q = sql.delete(a1).where(a1.c.c1 == t2.c.c1)
self.assert_compile(
q, "DELETE FROM a1 FROM t1 AS a1, t2 WHERE a1.c1 = t2.c1"
)
self.assert_compile(sql.delete(a1), "DELETE FROM t1 AS a1")
def test_update_from(self):
metadata = MetaData()
table1 = Table(
"mytable",
metadata,
Column("myid", Integer),
Column("name", String(30)),
Column("description", String(50)),
)
table2 = Table(
"myothertable",
metadata,
Column("otherid", Integer),
Column("othername", String(30)),
)
mt = table1.alias()
u = (
table1.update()
.values(name="foo")
.where(table2.c.otherid == table1.c.myid)
)
# testing mssql.base.MSSQLCompiler.update_from_clause
self.assert_compile(
u,
"UPDATE mytable SET name=:name "
"FROM mytable, myothertable WHERE "
"myothertable.otherid = mytable.myid",
)
self.assert_compile(
u.where(table2.c.othername == mt.c.name),
"UPDATE mytable SET name=:name "
"FROM mytable, myothertable, mytable AS mytable_1 "
"WHERE myothertable.otherid = mytable.myid "
"AND myothertable.othername = mytable_1.name",
)
def test_update_from_hint(self):
t = table("sometable", column("somecolumn"))
t2 = table("othertable", column("somecolumn"))
for darg in ("*", "mssql"):
self.assert_compile(
t.update()
.where(t.c.somecolumn == t2.c.somecolumn)
.values(somecolumn="x")
.with_hint("WITH (PAGLOCK)", selectable=t2, dialect_name=darg),
"UPDATE sometable SET somecolumn=:somecolumn "
"FROM sometable, othertable WITH (PAGLOCK) "
"WHERE sometable.somecolumn = othertable.somecolumn",
)
def test_update_to_select_schema(self):
meta = MetaData()
table = Table(
"sometable",
meta,
Column("sym", String),
Column("val", Integer),
schema="schema",
)
other = Table(
"#other", meta, Column("sym", String), Column("newval", Integer)
)
stmt = table.update().values(
val=select([other.c.newval])
.where(table.c.sym == other.c.sym)
.as_scalar()
)
self.assert_compile(
stmt,
"UPDATE [schema].sometable SET val="
"(SELECT [#other].newval FROM [#other] "
"WHERE [schema].sometable.sym = [#other].sym)",
)
stmt = (
table.update()
.values(val=other.c.newval)
.where(table.c.sym == other.c.sym)
)
self.assert_compile(
stmt,
"UPDATE [schema].sometable SET val="
"[#other].newval FROM [schema].sometable, "
"[#other] WHERE [schema].sometable.sym = [#other].sym",
)
# TODO: not supported yet.
# def test_delete_from_hint(self):
# t = table('sometable', column('somecolumn'))
# t2 = table('othertable', column('somecolumn'))
# for darg in ("*", "mssql"):
# self.assert_compile(
# t.delete().where(t.c.somecolumn==t2.c.somecolumn).
# with_hint("WITH (PAGLOCK)",
# selectable=t2,
# dialect_name=darg),
# ""
# )
def test_strict_binds(self):
"""test the 'strict' compiler binds."""
from sqlalchemy.dialects.mssql.base import MSSQLStrictCompiler
mxodbc_dialect = mxodbc.dialect()
mxodbc_dialect.statement_compiler = MSSQLStrictCompiler
t = table("sometable", column("foo"))
for expr, compiled in [
(
select([literal("x"), literal("y")]),
"SELECT 'x' AS anon_1, 'y' AS anon_2",
),
(
select([t]).where(t.c.foo.in_(["x", "y", "z"])),
"SELECT sometable.foo FROM sometable WHERE sometable.foo "
"IN ('x', 'y', 'z')",
),
(t.c.foo.in_([None]), "sometable.foo IN (NULL)"),
]:
self.assert_compile(expr, compiled, dialect=mxodbc_dialect)
def test_in_with_subqueries(self):
"""Test removal of legacy behavior that converted "x==subquery"
to use IN.
"""
t = table("sometable", column("somecolumn"))
self.assert_compile(
t.select().where(t.c.somecolumn == t.select()),
"SELECT sometable.somecolumn FROM "
"sometable WHERE sometable.somecolumn = "
"(SELECT sometable.somecolumn FROM "
"sometable)",
)
self.assert_compile(
t.select().where(t.c.somecolumn != t.select()),
"SELECT sometable.somecolumn FROM "
"sometable WHERE sometable.somecolumn != "
"(SELECT sometable.somecolumn FROM "
"sometable)",
)
@testing.uses_deprecated
def test_count(self):
t = table("sometable", column("somecolumn"))
self.assert_compile(
t.count(),
"SELECT count(sometable.somecolumn) AS "
"tbl_row_count FROM sometable",
)
def test_noorderby_insubquery(self):
"""test that the ms-sql dialect removes ORDER BY clauses from
subqueries"""
table1 = table(
"mytable",
column("myid", Integer),
column("name", String),
column("description", String),
)
q = select([table1.c.myid], order_by=[table1.c.myid]).alias("foo")
crit = q.c.myid == table1.c.myid
self.assert_compile(
select(["*"], crit),
"SELECT * FROM (SELECT mytable.myid AS "
"myid FROM mytable) AS foo, mytable WHERE "
"foo.myid = mytable.myid",
)
def test_noorderby_parameters_insubquery(self):
"""test that the ms-sql dialect does not include ORDER BY
positional parameters in subqueries"""
table1 = table(
"mytable",
column("myid", Integer),
column("name", String),
column("description", String),
)
q = select(
[table1.c.myid, sql.literal("bar").label("c1")],
order_by=[table1.c.name + "-"],
).alias("foo")
crit = q.c.myid == table1.c.myid
dialect = mssql.dialect()
dialect.paramstyle = "qmark"
dialect.positional = True
self.assert_compile(
select(["*"], crit),
"SELECT * FROM (SELECT mytable.myid AS "
"myid, ? AS c1 FROM mytable) AS foo, mytable WHERE "
"foo.myid = mytable.myid",
dialect=dialect,
checkparams={"param_1": "bar"},
# if name_1 is included, too many parameters are passed to dbapi
checkpositional=("bar",),
)
def test_force_schema_quoted_name_w_dot_case_insensitive(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("id", Integer, primary_key=True),
schema=quoted_name("foo.dbo", True),
)
self.assert_compile(
select([tbl]), "SELECT [foo.dbo].test.id FROM [foo.dbo].test"
)
def test_force_schema_quoted_w_dot_case_insensitive(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("id", Integer, primary_key=True),
schema=quoted_name("foo.dbo", True),
)
self.assert_compile(
select([tbl]), "SELECT [foo.dbo].test.id FROM [foo.dbo].test"
)
def test_force_schema_quoted_name_w_dot_case_sensitive(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("id", Integer, primary_key=True),
schema=quoted_name("Foo.dbo", True),
)
self.assert_compile(
select([tbl]), "SELECT [Foo.dbo].test.id FROM [Foo.dbo].test"
)
def test_force_schema_quoted_w_dot_case_sensitive(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("id", Integer, primary_key=True),
schema="[Foo.dbo]",
)
self.assert_compile(
select([tbl]), "SELECT [Foo.dbo].test.id FROM [Foo.dbo].test"
)
def test_schema_autosplit_w_dot_case_insensitive(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("id", Integer, primary_key=True),
schema="foo.dbo",
)
self.assert_compile(
select([tbl]), "SELECT foo.dbo.test.id FROM foo.dbo.test"
)
def test_schema_autosplit_w_dot_case_sensitive(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("id", Integer, primary_key=True),
schema="Foo.dbo",
)
self.assert_compile(
select([tbl]), "SELECT [Foo].dbo.test.id FROM [Foo].dbo.test"
)
def test_delete_schema(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("id", Integer, primary_key=True),
schema="paj",
)
self.assert_compile(
tbl.delete(tbl.c.id == 1),
"DELETE FROM paj.test WHERE paj.test.id = " ":id_1",
)
s = select([tbl.c.id]).where(tbl.c.id == 1)
self.assert_compile(
tbl.delete().where(tbl.c.id.in_(s)),
"DELETE FROM paj.test WHERE paj.test.id IN "
"(SELECT paj.test.id FROM paj.test "
"WHERE paj.test.id = :id_1)",
)
def test_delete_schema_multipart(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("id", Integer, primary_key=True),
schema="banana.paj",
)
self.assert_compile(
tbl.delete(tbl.c.id == 1),
"DELETE FROM banana.paj.test WHERE " "banana.paj.test.id = :id_1",
)
s = select([tbl.c.id]).where(tbl.c.id == 1)
self.assert_compile(
tbl.delete().where(tbl.c.id.in_(s)),
"DELETE FROM banana.paj.test WHERE "
"banana.paj.test.id IN (SELECT banana.paj.test.id "
"FROM banana.paj.test WHERE "
"banana.paj.test.id = :id_1)",
)
def test_delete_schema_multipart_needs_quoting(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("id", Integer, primary_key=True),
schema="banana split.paj",
)
self.assert_compile(
tbl.delete(tbl.c.id == 1),
"DELETE FROM [banana split].paj.test WHERE "
"[banana split].paj.test.id = :id_1",
)
s = select([tbl.c.id]).where(tbl.c.id == 1)
self.assert_compile(
tbl.delete().where(tbl.c.id.in_(s)),
"DELETE FROM [banana split].paj.test WHERE "
"[banana split].paj.test.id IN ("
"SELECT [banana split].paj.test.id FROM "
"[banana split].paj.test WHERE "
"[banana split].paj.test.id = :id_1)",
)
def test_delete_schema_multipart_both_need_quoting(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("id", Integer, primary_key=True),
schema="banana split.paj with a space",
)
self.assert_compile(
tbl.delete(tbl.c.id == 1),
"DELETE FROM [banana split].[paj with a "
"space].test WHERE [banana split].[paj "
"with a space].test.id = :id_1",
)
s = select([tbl.c.id]).where(tbl.c.id == 1)
self.assert_compile(
tbl.delete().where(tbl.c.id.in_(s)),
"DELETE FROM [banana split].[paj with a space].test "
"WHERE [banana split].[paj with a space].test.id IN "
"(SELECT [banana split].[paj with a space].test.id "
"FROM [banana split].[paj with a space].test "
"WHERE [banana split].[paj with a space].test.id = :id_1)",
)
def test_union(self):
t1 = table(
"t1",
column("col1"),
column("col2"),
column("col3"),
column("col4"),
)
t2 = table(
"t2",
column("col1"),
column("col2"),
column("col3"),
column("col4"),
)
s1, s2 = (
select(
[t1.c.col3.label("col3"), t1.c.col4.label("col4")],
t1.c.col2.in_(["t1col2r1", "t1col2r2"]),
),
select(
[t2.c.col3.label("col3"), t2.c.col4.label("col4")],
t2.c.col2.in_(["t2col2r2", "t2col2r3"]),
),
)
u = union(s1, s2, order_by=["col3", "col4"])
self.assert_compile(
u,
"SELECT t1.col3 AS col3, t1.col4 AS col4 "
"FROM t1 WHERE t1.col2 IN (:col2_1, "
":col2_2) UNION SELECT t2.col3 AS col3, "
"t2.col4 AS col4 FROM t2 WHERE t2.col2 IN "
"(:col2_3, :col2_4) ORDER BY col3, col4",
)
self.assert_compile(
u.alias("bar").select(),
"SELECT bar.col3, bar.col4 FROM (SELECT "
"t1.col3 AS col3, t1.col4 AS col4 FROM t1 "
"WHERE t1.col2 IN (:col2_1, :col2_2) UNION "
"SELECT t2.col3 AS col3, t2.col4 AS col4 "
"FROM t2 WHERE t2.col2 IN (:col2_3, "
":col2_4)) AS bar",
)
def test_function(self):
self.assert_compile(func.foo(1, 2), "foo(:foo_1, :foo_2)")
self.assert_compile(func.current_time(), "CURRENT_TIME")
self.assert_compile(func.foo(), "foo()")
m = MetaData()
t = Table(
"sometable", m, Column("col1", Integer), Column("col2", Integer)
)
self.assert_compile(
select([func.max(t.c.col1)]),
"SELECT max(sometable.col1) AS max_1 FROM " "sometable",
)
def test_function_overrides(self):
self.assert_compile(func.current_date(), "GETDATE()")
self.assert_compile(func.length(3), "LEN(:length_1)")
def test_extract(self):
t = table("t", column("col1"))
for field in "day", "month", "year":
self.assert_compile(
select([extract(field, t.c.col1)]),
"SELECT DATEPART(%s, t.col1) AS anon_1 FROM t" % field,
)
def test_update_returning(self):
table1 = table(
"mytable",
column("myid", Integer),
column("name", String(128)),
column("description", String(128)),
)
u = update(table1, values=dict(name="foo")).returning(
table1.c.myid, table1.c.name
)
self.assert_compile(
u,
"UPDATE mytable SET name=:name OUTPUT "
"inserted.myid, inserted.name",
)
u = update(table1, values=dict(name="foo")).returning(table1)
self.assert_compile(
u,
"UPDATE mytable SET name=:name OUTPUT "
"inserted.myid, inserted.name, "
"inserted.description",
)
u = (
update(table1, values=dict(name="foo"))
.returning(table1)
.where(table1.c.name == "bar")
)
self.assert_compile(
u,
"UPDATE mytable SET name=:name OUTPUT "
"inserted.myid, inserted.name, "
"inserted.description WHERE mytable.name = "
":name_1",
)
u = update(table1, values=dict(name="foo")).returning(
func.length(table1.c.name)
)
self.assert_compile(
u,
"UPDATE mytable SET name=:name OUTPUT "
"LEN(inserted.name) AS length_1",
)
def test_delete_returning(self):
table1 = table(
"mytable",
column("myid", Integer),
column("name", String(128)),
column("description", String(128)),
)
d = delete(table1).returning(table1.c.myid, table1.c.name)
self.assert_compile(
d, "DELETE FROM mytable OUTPUT deleted.myid, " "deleted.name"
)
d = (
delete(table1)
.where(table1.c.name == "bar")
.returning(table1.c.myid, table1.c.name)
)
self.assert_compile(
d,
"DELETE FROM mytable OUTPUT deleted.myid, "
"deleted.name WHERE mytable.name = :name_1",
)
def test_insert_returning(self):
table1 = table(
"mytable",
column("myid", Integer),
column("name", String(128)),
column("description", String(128)),
)
i = insert(table1, values=dict(name="foo")).returning(
table1.c.myid, table1.c.name
)
self.assert_compile(
i,
"INSERT INTO mytable (name) OUTPUT "
"inserted.myid, inserted.name VALUES "
"(:name)",
)
i = insert(table1, values=dict(name="foo")).returning(table1)
self.assert_compile(
i,
"INSERT INTO mytable (name) OUTPUT "
"inserted.myid, inserted.name, "
"inserted.description VALUES (:name)",
)
i = insert(table1, values=dict(name="foo")).returning(
func.length(table1.c.name)
)
self.assert_compile(
i,
"INSERT INTO mytable (name) OUTPUT "
"LEN(inserted.name) AS length_1 VALUES "
"(:name)",
)
def test_limit_using_top(self):
t = table("t", column("x", Integer), column("y", Integer))
s = select([t]).where(t.c.x == 5).order_by(t.c.y).limit(10)
self.assert_compile(
s,
"SELECT TOP 10 t.x, t.y FROM t WHERE t.x = :x_1 ORDER BY t.y",
checkparams={"x_1": 5},
)
def test_limit_zero_using_top(self):
t = table("t", column("x", Integer), column("y", Integer))
s = select([t]).where(t.c.x == 5).order_by(t.c.y).limit(0)
self.assert_compile(
s,
"SELECT TOP 0 t.x, t.y FROM t WHERE t.x = :x_1 ORDER BY t.y",
checkparams={"x_1": 5},
)
c = s.compile(dialect=mssql.dialect())
eq_(len(c._result_columns), 2)
assert t.c.x in set(c._create_result_map()["x"][1])
def test_offset_using_window(self):
t = table("t", column("x", Integer), column("y", Integer))
s = select([t]).where(t.c.x == 5).order_by(t.c.y).offset(20)
# test that the select is not altered with subsequent compile
# calls
for i in range(2):
self.assert_compile(
s,
"SELECT anon_1.x, anon_1.y FROM (SELECT t.x AS x, t.y "
"AS y, ROW_NUMBER() OVER (ORDER BY t.y) AS "
"mssql_rn FROM t WHERE t.x = :x_1) AS "
"anon_1 WHERE mssql_rn > :param_1",
checkparams={"param_1": 20, "x_1": 5},
)
c = s.compile(dialect=mssql.dialect())
eq_(len(c._result_columns), 2)
assert t.c.x in set(c._create_result_map()["x"][1])
def test_simple_limit_expression_offset_using_window(self):
t = table("t", column("x", Integer), column("y", Integer))
s = (
select([t])
.where(t.c.x == 5)
.order_by(t.c.y)
.limit(10)
.offset(literal_column("20"))
)
self.assert_compile(
s,
"SELECT anon_1.x, anon_1.y "
"FROM (SELECT t.x AS x, t.y AS y, "
"ROW_NUMBER() OVER (ORDER BY t.y) AS mssql_rn "
"FROM t "
"WHERE t.x = :x_1) AS anon_1 "
"WHERE mssql_rn > 20 AND mssql_rn <= :param_1 + 20",
checkparams={"param_1": 10, "x_1": 5},
)
def test_limit_offset_using_window(self):
t = table("t", column("x", Integer), column("y", Integer))
s = select([t]).where(t.c.x == 5).order_by(t.c.y).limit(10).offset(20)
self.assert_compile(
s,
"SELECT anon_1.x, anon_1.y "
"FROM (SELECT t.x AS x, t.y AS y, "
"ROW_NUMBER() OVER (ORDER BY t.y) AS mssql_rn "
"FROM t "
"WHERE t.x = :x_1) AS anon_1 "
"WHERE mssql_rn > :param_1 AND mssql_rn <= :param_2 + :param_1",
checkparams={"param_1": 20, "param_2": 10, "x_1": 5},
)
c = s.compile(dialect=mssql.dialect())
eq_(len(c._result_columns), 2)
assert t.c.x in set(c._create_result_map()["x"][1])
assert t.c.y in set(c._create_result_map()["y"][1])
def test_limit_offset_w_ambiguous_cols(self):
t = table("t", column("x", Integer), column("y", Integer))
cols = [t.c.x, t.c.x.label("q"), t.c.x.label("p"), t.c.y]
s = select(cols).where(t.c.x == 5).order_by(t.c.y).limit(10).offset(20)
self.assert_compile(
s,
"SELECT anon_1.x, anon_1.q, anon_1.p, anon_1.y "
"FROM (SELECT t.x AS x, t.x AS q, t.x AS p, t.y AS y, "
"ROW_NUMBER() OVER (ORDER BY t.y) AS mssql_rn "
"FROM t "
"WHERE t.x = :x_1) AS anon_1 "
"WHERE mssql_rn > :param_1 AND mssql_rn <= :param_2 + :param_1",
checkparams={"param_1": 20, "param_2": 10, "x_1": 5},
)
c = s.compile(dialect=mssql.dialect())
eq_(len(c._result_columns), 4)
result_map = c._create_result_map()
for col in cols:
is_(result_map[col.key][1][0], col)
def test_limit_offset_with_correlated_order_by(self):
t1 = table("t1", column("x", Integer), column("y", Integer))
t2 = table("t2", column("x", Integer), column("y", Integer))
order_by = select([t2.c.y]).where(t1.c.x == t2.c.x).as_scalar()
s = (
select([t1])
.where(t1.c.x == 5)
.order_by(order_by)
.limit(10)
.offset(20)
)
self.assert_compile(
s,
"SELECT anon_1.x, anon_1.y "
"FROM (SELECT t1.x AS x, t1.y AS y, "
"ROW_NUMBER() OVER (ORDER BY "
"(SELECT t2.y FROM t2 WHERE t1.x = t2.x)"
") AS mssql_rn "
"FROM t1 "
"WHERE t1.x = :x_1) AS anon_1 "
"WHERE mssql_rn > :param_1 AND mssql_rn <= :param_2 + :param_1",
checkparams={"param_1": 20, "param_2": 10, "x_1": 5},
)
c = s.compile(dialect=mssql.dialect())
eq_(len(c._result_columns), 2)
assert t1.c.x in set(c._create_result_map()["x"][1])
assert t1.c.y in set(c._create_result_map()["y"][1])
def test_offset_dont_misapply_labelreference(self):
m = MetaData()
t = Table("t", m, Column("x", Integer))
expr1 = func.foo(t.c.x).label("x")
expr2 = func.foo(t.c.x).label("y")
stmt1 = select([expr1]).order_by(expr1.desc()).offset(1)
stmt2 = select([expr2]).order_by(expr2.desc()).offset(1)
self.assert_compile(
stmt1,
"SELECT anon_1.x FROM (SELECT foo(t.x) AS x, "
"ROW_NUMBER() OVER (ORDER BY foo(t.x) DESC) AS mssql_rn FROM t) "
"AS anon_1 WHERE mssql_rn > :param_1",
)
self.assert_compile(
stmt2,
"SELECT anon_1.y FROM (SELECT foo(t.x) AS y, "
"ROW_NUMBER() OVER (ORDER BY foo(t.x) DESC) AS mssql_rn FROM t) "
"AS anon_1 WHERE mssql_rn > :param_1",
)
def test_limit_zero_offset_using_window(self):
t = table("t", column("x", Integer), column("y", Integer))
s = select([t]).where(t.c.x == 5).order_by(t.c.y).limit(0).offset(0)
# render the LIMIT of zero, but not the OFFSET
# of zero, so produces TOP 0
self.assert_compile(
s,
"SELECT TOP 0 t.x, t.y FROM t " "WHERE t.x = :x_1 ORDER BY t.y",
checkparams={"x_1": 5},
)
def test_primary_key_no_identity(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("id", Integer, autoincrement=False, primary_key=True),
)
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (id INTEGER NOT NULL, " "PRIMARY KEY (id))",
)
def test_primary_key_defaults_to_identity(self):
metadata = MetaData()
tbl = Table("test", metadata, Column("id", Integer, primary_key=True))
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1), "
"PRIMARY KEY (id))",
)
def test_identity_no_primary_key(self):
metadata = MetaData()
tbl = Table(
"test", metadata, Column("id", Integer, autoincrement=True)
)
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1)" ")",
)
def test_identity_separate_from_primary_key(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("id", Integer, autoincrement=False, primary_key=True),
Column("x", Integer, autoincrement=True),
)
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (id INTEGER NOT NULL, "
"x INTEGER NOT NULL IDENTITY(1,1), "
"PRIMARY KEY (id))",
)
def test_identity_illegal_two_autoincrements(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("id", Integer, autoincrement=True),
Column("id2", Integer, autoincrement=True),
)
        # this will be rejected by the database; we are just asserting what
        # two autoincrement columns currently render as
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1), "
"id2 INTEGER NOT NULL IDENTITY(1,1))",
)
def test_identity_start_0(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("id", Integer, mssql_identity_start=0, primary_key=True),
)
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (id INTEGER NOT NULL IDENTITY(0,1), "
"PRIMARY KEY (id))",
)
def test_identity_increment_5(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column(
"id", Integer, mssql_identity_increment=5, primary_key=True
),
)
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,5), "
"PRIMARY KEY (id))",
)
def test_sequence_start_0(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("id", Integer, Sequence("", 0), primary_key=True),
)
with testing.expect_deprecated(
"Use of Sequence with SQL Server in order to affect "
):
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (id INTEGER NOT NULL IDENTITY(0,1), "
"PRIMARY KEY (id))",
)
def test_sequence_non_primary_key(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("id", Integer, Sequence("", start=5), primary_key=False),
)
with testing.expect_deprecated(
"Use of Sequence with SQL Server in order to affect "
):
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (id INTEGER NOT NULL IDENTITY(5,1))",
)
def test_sequence_ignore_nullability(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("id", Integer, Sequence("", start=5), nullable=True),
)
with testing.expect_deprecated(
"Use of Sequence with SQL Server in order to affect "
):
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (id INTEGER NOT NULL IDENTITY(5,1))",
)
def test_table_pkc_clustering(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("x", Integer, autoincrement=False),
Column("y", Integer, autoincrement=False),
PrimaryKeyConstraint("x", "y", mssql_clustered=True),
)
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NOT NULL, "
"PRIMARY KEY CLUSTERED (x, y))",
)
def test_table_pkc_explicit_nonclustered(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("x", Integer, autoincrement=False),
Column("y", Integer, autoincrement=False),
PrimaryKeyConstraint("x", "y", mssql_clustered=False),
)
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NOT NULL, "
"PRIMARY KEY NONCLUSTERED (x, y))",
)
def test_table_idx_explicit_nonclustered(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("x", Integer, autoincrement=False),
Column("y", Integer, autoincrement=False),
)
idx = Index("myidx", tbl.c.x, tbl.c.y, mssql_clustered=False)
self.assert_compile(
schema.CreateIndex(idx),
"CREATE NONCLUSTERED INDEX myidx ON test (x, y)",
)
def test_table_uc_explicit_nonclustered(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("x", Integer, autoincrement=False),
Column("y", Integer, autoincrement=False),
UniqueConstraint("x", "y", mssql_clustered=False),
)
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (x INTEGER NULL, y INTEGER NULL, "
"UNIQUE NONCLUSTERED (x, y))",
)
def test_table_uc_clustering(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("x", Integer, autoincrement=False),
Column("y", Integer, autoincrement=False),
PrimaryKeyConstraint("x"),
UniqueConstraint("y", mssql_clustered=True),
)
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NULL, "
"PRIMARY KEY (x), UNIQUE CLUSTERED (y))",
)
def test_index_clustering(self):
metadata = MetaData()
tbl = Table("test", metadata, Column("id", Integer))
idx = Index("foo", tbl.c.id, mssql_clustered=True)
self.assert_compile(
schema.CreateIndex(idx), "CREATE CLUSTERED INDEX foo ON test (id)"
)
def test_index_where(self):
metadata = MetaData()
tbl = Table("test", metadata, Column("data", Integer))
idx = Index("test_idx_data_1", tbl.c.data, mssql_where=tbl.c.data > 1)
self.assert_compile(
schema.CreateIndex(idx),
"CREATE INDEX test_idx_data_1 ON test (data) WHERE data > 1",
)
def test_index_ordering(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("x", Integer),
Column("y", Integer),
Column("z", Integer),
)
idx = Index("foo", tbl.c.x.desc(), "y")
self.assert_compile(
schema.CreateIndex(idx), "CREATE INDEX foo ON test (x DESC, y)"
)
def test_create_index_expr(self):
m = MetaData()
t1 = Table("foo", m, Column("x", Integer))
self.assert_compile(
schema.CreateIndex(Index("bar", t1.c.x > 5)),
"CREATE INDEX bar ON foo (x > 5)",
)
def test_drop_index_w_schema(self):
m = MetaData()
t1 = Table("foo", m, Column("x", Integer), schema="bar")
self.assert_compile(
schema.DropIndex(Index("idx_foo", t1.c.x)),
"DROP INDEX idx_foo ON bar.foo",
)
def test_index_extra_include_1(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("x", Integer),
Column("y", Integer),
Column("z", Integer),
)
idx = Index("foo", tbl.c.x, mssql_include=["y"])
self.assert_compile(
schema.CreateIndex(idx), "CREATE INDEX foo ON test (x) INCLUDE (y)"
)
def test_index_extra_include_2(self):
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("x", Integer),
Column("y", Integer),
Column("z", Integer),
)
idx = Index("foo", tbl.c.x, mssql_include=[tbl.c.y])
self.assert_compile(
schema.CreateIndex(idx), "CREATE INDEX foo ON test (x) INCLUDE (y)"
)
def test_try_cast(self):
metadata = MetaData()
t1 = Table("t1", metadata, Column("id", Integer, primary_key=True))
self.assert_compile(
select([try_cast(t1.c.id, Integer)]),
"SELECT TRY_CAST (t1.id AS INTEGER) AS anon_1 FROM t1",
)
@testing.combinations(
("no_persisted", "", "ignore"),
("persisted_none", "", None),
("persisted_true", " PERSISTED", True),
("persisted_false", "", False),
id_="iaa",
)
def test_column_computed(self, text, persisted):
m = MetaData()
kwargs = {"persisted": persisted} if persisted != "ignore" else {}
t = Table(
"t",
m,
Column("x", Integer),
Column("y", Integer, Computed("x + 2", **kwargs)),
)
self.assert_compile(
schema.CreateTable(t),
"CREATE TABLE t (x INTEGER NULL, y AS (x + 2)%s)" % text,
)
class SchemaTest(fixtures.TestBase):
def setup(self):
t = Table(
"sometable",
MetaData(),
Column("pk_column", Integer),
Column("test_column", String),
)
self.column = t.c.test_column
dialect = mssql.dialect()
self.ddl_compiler = dialect.ddl_compiler(
dialect, schema.CreateTable(t)
)
def _column_spec(self):
return self.ddl_compiler.get_column_specification(self.column)
def test_that_mssql_default_nullability_emits_null(self):
eq_("test_column VARCHAR(max) NULL", self._column_spec())
def test_that_mssql_none_nullability_does_not_emit_nullability(self):
self.column.nullable = None
eq_("test_column VARCHAR(max)", self._column_spec())
def test_that_mssql_specified_nullable_emits_null(self):
self.column.nullable = True
eq_("test_column VARCHAR(max) NULL", self._column_spec())
def test_that_mssql_specified_not_nullable_emits_not_null(self):
self.column.nullable = False
eq_("test_column VARCHAR(max) NOT NULL", self._column_spec())
| 33.8214
| 79
| 0.528113
|
5d748bc8b870f43353e27832136e5f96e0ec99c8
| 1,173
|
py
|
Python
|
tests/integration/cvm/test_describe_instances.py
|
QcloudApi/qcloudapi-sdk-python
|
9b097e4f4089cb6432ada7593999fab92f31fb43
|
[
"Apache-2.0"
] | 212
|
2015-04-08T14:37:30.000Z
|
2021-12-24T10:27:13.000Z
|
tests/integration/cvm/test_describe_instances.py
|
QcloudApi/qcloudapi-sdk-python
|
9b097e4f4089cb6432ada7593999fab92f31fb43
|
[
"Apache-2.0"
] | 41
|
2016-02-23T06:17:41.000Z
|
2019-06-06T08:30:20.000Z
|
tests/integration/cvm/test_describe_instances.py
|
QcloudApi/qcloudapi-sdk-python
|
9b097e4f4089cb6432ada7593999fab92f31fb43
|
[
"Apache-2.0"
] | 92
|
2015-07-25T08:53:13.000Z
|
2020-09-01T09:51:43.000Z
|
# Copyright 1999-2017 Tencent Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
from QcloudApi import qcloudapi
def test_describe_instances():
config = {
"Region": "ap-guangzhou",
"secretId": os.environ.get("TENCENTCLOUD_SECRET_ID"),
"secretKey": os.environ.get("TENCENTCLOUD_SECRET_KEY"),
}
service = qcloudapi.QcloudApi("cvm", config)
params = {
"Version": "2017-03-12",
"Limit": 1,
}
result = service.call("DescribeInstances", params,
req_timeout=10, debug=True).decode("utf-8")
assert len(json.loads(result)["Response"].get("InstanceSet", [])) == 1
| 33.514286
| 74
| 0.685422
|
e73f05c94d2050b22ef431e3ae6da24eedbbcd4f
| 1,408
|
py
|
Python
|
numba_dpcomp/mlir/utils.py
|
Hardcode84/mlir-extensions
|
19d52fb473d2271224061057918ce337565b4496
|
[
"Apache-2.0"
] | null | null | null |
numba_dpcomp/mlir/utils.py
|
Hardcode84/mlir-extensions
|
19d52fb473d2271224061057918ce337565b4496
|
[
"Apache-2.0"
] | null | null | null |
numba_dpcomp/mlir/utils.py
|
Hardcode84/mlir-extensions
|
19d52fb473d2271224061057918ce337565b4496
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ctypes
import os
import atexit
import sys
import numba_dpcomp
def load_lib(name):
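    """Locate and load the shared library ``name``: search the numba_dpcomp package directory and every PYTHONPATH entry using the platform-specific file name, and return None if nothing loads."""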
runtime_search_paths = [os.path.dirname(numba_dpcomp.__file__)]
try:
runtime_search_paths += os.environ['PYTHONPATH'].split(os.pathsep)
except KeyError:
pass
if sys.platform.startswith('linux'):
lib_name = f'lib{name}.so'
elif sys.platform.startswith('darwin'):
lib_name = f'lib{name}.dylib'
elif sys.platform.startswith('win'):
lib_name = f'{name}.dll'
else:
return None
for path in runtime_search_paths:
lib_path = lib_name if len(path) == 0 else os.path.join(path, lib_name)
try:
return ctypes.CDLL(lib_path)
        except OSError:
pass
return None
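# Illustrative usage (not part of the original module; the library name below
# is an assumption):
#   runtime = load_lib('dpcomp-runtime')
#   if runtime is None:
#       raise ImportError('could not locate the runtime shared library')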
def mlir_func_name(name):
return '_mlir_ciface_' + name
| 28.16
| 79
| 0.691761
|
2a494cce09c35216cc0e1dd3437ebd6270869bd9
| 352
|
py
|
Python
|
Proj029Pipelines/pipeline_docs/pipeline_report/trackers/Fastqc.py
|
CGATOxford/proj029
|
f0a8ea63b4f086e673aa3bf8b7d3b9749261b525
|
[
"BSD-3-Clause"
] | 3
|
2016-04-04T22:54:14.000Z
|
2017-04-01T09:37:54.000Z
|
Proj029Pipelines/pipeline_docs/pipeline_report/trackers/Fastqc.py
|
CGATOxford/proj029
|
f0a8ea63b4f086e673aa3bf8b7d3b9749261b525
|
[
"BSD-3-Clause"
] | null | null | null |
Proj029Pipelines/pipeline_docs/pipeline_report/trackers/Fastqc.py
|
CGATOxford/proj029
|
f0a8ea63b4f086e673aa3bf8b7d3b9749261b525
|
[
"BSD-3-Clause"
] | null | null | null |
from SphinxReport.Tracker import *
class ReadSummary(TrackerSQL):
'''
Summarise read counts for each track
'''
def __call__(self, track, slice=None):
return self.getAll("SELECT * FROM reads_summary")
class PerBaseQuality(TrackerImages):
'''
representative plots for fastqc quality
score analysis
'''
| 16.761905
| 57
| 0.664773
|
12bef9ad2f714a7bbaf291a11aa572245d4362f3
| 1,002
|
py
|
Python
|
src/test/test_norway.py
|
SoftXperience/astral
|
4e8433276ed542c2ad34cbc5d6b6f8a290d73cc7
|
[
"Apache-2.0"
] | 152
|
2016-08-16T22:42:12.000Z
|
2022-03-16T10:54:13.000Z
|
src/test/test_norway.py
|
SoftXperience/astral
|
4e8433276ed542c2ad34cbc5d6b6f8a290d73cc7
|
[
"Apache-2.0"
] | 64
|
2016-08-16T22:40:39.000Z
|
2022-03-07T10:20:11.000Z
|
src/test/test_norway.py
|
SoftXperience/astral
|
4e8433276ed542c2ad34cbc5d6b6f8a290d73cc7
|
[
"Apache-2.0"
] | 43
|
2016-08-16T22:35:05.000Z
|
2022-01-24T07:38:02.000Z
|
from __future__ import unicode_literals
import pytest
import astral
from datetime import datetime, timedelta
import pytz
from astral import sun
def _next_event(obs: astral.Observer, dt: datetime, event: str):
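    """Return the first occurrence of ``event`` on or after ``dt``, scanning forward one day at a time for up to a year."""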
for offset in range(0, 365):
newdate = dt + timedelta(days=offset)
try:
t = getattr(sun, event)(date=newdate, observer=obs)
return t
except ValueError:
pass
assert False, "Should be unreachable" # pragma: no cover
def test_NorwaySunUp():
"""Test location in Norway where the sun doesn't set in summer."""
june = datetime(2019, 6, 5, tzinfo=pytz.utc)
obs = astral.Observer(69.6, 18.8, 0.0)
with pytest.raises(ValueError):
sun.sunrise(obs, june)
with pytest.raises(ValueError):
sun.sunset(obs, june)
# Find the next sunset and sunrise:
next_sunrise = _next_event(obs, june, "sunrise")
next_sunset = _next_event(obs, june, "sunset")
assert next_sunset < next_sunrise
| 27.833333
| 70
| 0.666667
|
fad72f138cc1dd95f09b9cdcbb87997c8d2bcf1b
| 6,298
|
py
|
Python
|
import_wikidata.py
|
DOsinga/wiki_import
|
890b65cd13e9a16cd0fca40f2a56f3db7f48f94a
|
[
"Apache-2.0"
] | 12
|
2016-09-13T12:57:39.000Z
|
2022-01-02T07:13:44.000Z
|
import_wikidata.py
|
DOsinga/wiki_import
|
890b65cd13e9a16cd0fca40f2a56f3db7f48f94a
|
[
"Apache-2.0"
] | null | null | null |
import_wikidata.py
|
DOsinga/wiki_import
|
890b65cd13e9a16cd0fca40f2a56f3db7f48f94a
|
[
"Apache-2.0"
] | 2
|
2019-06-24T02:07:12.000Z
|
2020-02-03T23:48:39.000Z
|
#!/usr/bin/env python
from collections import defaultdict
import argparse
import subprocess
import json
import re
import psycopg2
from psycopg2 import extras
# DATE_PARSE_RE is referenced below but not defined in this copy of the script;
# the pattern here is a best-guess reconstruction (an assumption) based on the
# Wikidata time format, e.g. '+2001-12-31T00:00:00Z'.
DATE_PARSE_RE = re.compile(r'^[+-]?0*(\d+)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)Z$')
def setup_db(connection_string):
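    """Create a fresh wikidata table (dropping any existing one) and return an open connection plus cursor."""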
conn = psycopg2.connect(connection_string)
cursor = conn.cursor()
cursor.execute('DROP TABLE IF EXISTS wikidata')
cursor.execute(
'CREATE TABLE wikidata ('
' wikipedia_id TEXT PRIMARY KEY,'
' title TEXT,'
' wikidata_id TEXT,'
' description TEXT,'
' properties JSONB'
')'
)
cursor.execute('CREATE INDEX wikidata_wikidata_id ON wikidata(wikidata_id)')
cursor.execute('CREATE INDEX wikidata_properties ON wikidata USING gin(properties)')
return conn, cursor
def parse_wikidata(lines):
for line in lines:
line = line.strip()
if line and line[0] == '{':
if line[-1] == ',':
line = line[:-1]
yield json.loads(line)
def map_value(value, id_name_map):
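    """Convert a Wikidata datavalue into a plain Python value: a string, a float, an ISO timestamp string, or a dict for globe coordinates; return None for empty or unsupported values."""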
if not value or not 'type' in value or not 'value' in value:
return None
typ = value['type']
value = value['value']
if typ == 'string':
return value
elif typ == 'wikibase-entityid':
entitiy_id = value['id']
return id_name_map.get(entitiy_id)
elif typ == 'time':
time_split = DATE_PARSE_RE.match(value['time'])
if not time_split:
return None
year, month, day, hour, minute, second = map(int, time_split.groups())
if day == 0:
day = 1
if month == 0:
month = 1
return '%04d-%02d-%02dT%02d:%02d:%02d' % (year, month, day, hour, minute, second)
elif typ == 'quantity':
return float(value['amount'])
elif typ == 'monolingualtext':
return value['text']
elif typ == 'globecoordinate':
lat = value.get('latitude')
lng = value.get('longitude')
if lat or lng:
res = {'lat': lat, 'lng': lng}
globe = value.get('globe', '').rsplit('/', 1)[-1]
if globe != 'Q2' and globe in id_name_map:
res['globe'] = globe
if value.get('altitude'):
res['altitude'] = value['altitude']
return res
return None
def main(dump, cursor):
"""We do two scans:
- first collect the id -> name / wikipedia title
- then store the actual objects with a json property.
The first step takes quite a bit of memory (5Gb) - could possibly be done using a temporary table in postgres.
"""
c = 0
skip = 0
id_name_map = {}
for d in parse_wikidata(subprocess.Popen(['bzcat'], stdin=open(dump), stdout=subprocess.PIPE).stdout):
c += 1
if c % 1000 == 0:
print(c, skip)
if d.get('sitelinks') and d['sitelinks'].get('enwiki'):
value = d['sitelinks']['enwiki']['title']
elif d['labels'].get('en'):
value = id_name_map[d['id']] = d['labels']['en']['value']
else:
skip += 1
continue
id_name_map[d['id']] = value
wp_ids = set()
c = 0
rec = 0
dupes = 0
for d in parse_wikidata(subprocess.Popen(['bzcat'], stdin=open(dump), stdout=subprocess.PIPE).stdout):
c += 1
if c % 1000 == 0:
print(c, rec, dupes)
wikipedia_id = d.get('sitelinks', {}).get('enwiki', {}).get('title')
title = d['labels'].get('en', {}).get('value')
description = d['descriptions'].get('en', {}).get('value')
wikidata_id = d['id']
properties = {}
if wikipedia_id and title:
# There are some duplicate wikipedia_id's in there. We could make wikidata_id the primary key
# but that doesn't fix the underlying dupe
if wikipedia_id in wp_ids:
dupes += 1
continue
wp_ids.add(wikipedia_id)
# Properties are mapped in a way where we create lists as values for wiki entities if there is more
# than one value. For other types, we always pick one value. If there is a preferred value, we'll
# pick that one.
            # Mostly this does what you want. For filtering on colors for flags it allows the query:
# SELECT title FROM wikidata WHERE properties @> '{"color": ["Green", "Red", "White"]}'
# However, if you'd want all flags that have Blue in them, you'd have to check for just "Blue"
# and also ["Blue"].
for prop_id, claims in d['claims'].items():
prop_name = id_name_map.get(prop_id)
if prop_name:
ranks = defaultdict(list)
for claim in claims:
mainsnak = claim.get('mainsnak')
if mainsnak:
data_value = map_value(mainsnak.get('datavalue'), id_name_map)
if data_value:
lst = ranks[claim['rank']]
if mainsnak['datavalue'].get('type') != 'wikibase-entityid':
del lst[:]
lst.append(data_value)
                    for r in 'preferred', 'normal', 'deprecated':
value = ranks[r]
if value:
if len(value) == 1:
value = value[0]
else:
value = sorted(value)
properties[prop_name] = value
break
rec += 1
cursor.execute(
'INSERT INTO wikidata (wikipedia_id, title, wikidata_id, description, properties) VALUES (%s, %s, %s, %s, %s)',
(wikipedia_id, title, wikidata_id, description, extras.Json(properties)),
)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Import wikidata into postgres')
parser.add_argument('--postgres', type=str, help='postgres connection string')
parser.add_argument('dump', type=str, help='BZipped wikidata dump')
args = parser.parse_args()
conn, cursor = setup_db(args.postgres)
main(args.dump, cursor)
conn.commit()
| 37.266272
| 127
| 0.539695
|
ccf7383ec2f2bb8a3a9863736ff4b8c36fdf495d
| 3,069
|
py
|
Python
|
nicos_sinq/devices/epics/generic.py
|
mlz-ictrl/nicos
|
a6de0bc194ba42e3dc04a033713b41b5499ba8e1
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 12
|
2019-11-06T15:40:36.000Z
|
2022-01-01T16:23:00.000Z
|
nicos_sinq/devices/epics/generic.py
|
ess-dmsc/nicos
|
755d61d403ff7123f804c45fc80c7ff4d762993b
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 91
|
2020-08-18T09:20:26.000Z
|
2022-02-01T11:07:14.000Z
|
nicos_sinq/devices/epics/generic.py
|
mlz-ictrl/nicos
|
a6de0bc194ba42e3dc04a033713b41b5499ba8e1
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 6
|
2020-01-11T10:52:30.000Z
|
2022-02-25T12:35:23.000Z
|
# -*- coding: utf-8 -*-
# *****************************************************************************
# NICOS, the Networked Instrument Control System of the MLZ
# Copyright (c) 2009-2021 by the NICOS contributors (see AUTHORS)
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Module authors:
# Mark Koennecke <mark.koennecke@psi.ch>
#
# *****************************************************************************
import epics
from nicos.core import CommunicationError, Override, Param, status
from nicos.core.mixins import HasLimits
from nicos.devices.epics import EpicsMoveable, EpicsReadable
class WindowMoveable(HasLimits, EpicsMoveable):
"""
Some devices do not have a way to determine their status. The only way
to test for completion is to read the value back and test if it is
within a certain window of the target. This is done here.
"""
parameters = {
'window': Param('Tolerance used for testing for completion',
type=float,
mandatory=True)
}
parameter_overrides = {
'target': Override(settable=True),
}
valuetype = float
_driveTarget = None
def doStart(self, target):
# I have to use my private _driveTarget as the target
# attribute is marked volatile in EpicsMoveable and is
# not holding the real target.
self._driveTarget = target
EpicsMoveable.doStart(self, target)
def doStatus(self, maxage=0):
pos = self.doRead(0)
        if self._driveTarget is not None:  # a target of exactly 0.0 must still be checked
if abs(pos - self._driveTarget) < self.window:
self._driveTarget = None
return status.OK, 'Done'
else:
return status.BUSY, 'Moving'
return status.OK, 'Done'
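    # Illustration (not part of the original class): with window=0.1 and a
    # target of 5.0, a read-back of 4.95 reports done while 4.8 reports busy.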
class EpicsArrayReadable(EpicsReadable):
parameters = {
'count': Param('How many array values to read',
type=int, mandatory=True),
}
def doRead(self, maxage=0):
if epics.ca.current_context() is None:
epics.ca.use_initial_context()
result = self._pvs['readpv'].get(timeout=self.epicstimeout,
count=self.count)
if result is None: # timeout
raise CommunicationError(self, 'timed out getting PV %r from EPICS'
% self._get_pv_name('readpv'))
return result
| 36.105882
| 79
| 0.61551
|
899cebc396f005bebe343ad3023cf19c5738a52b
| 1,306
|
py
|
Python
|
workspace/app/webapps/auth/route_login.py
|
r14r/FastAPI_App_Jobboard
|
a5ed77f51d94e45d4b3f3869a10fa0359aaad2f4
|
[
"MIT"
] | null | null | null |
workspace/app/webapps/auth/route_login.py
|
r14r/FastAPI_App_Jobboard
|
a5ed77f51d94e45d4b3f3869a10fa0359aaad2f4
|
[
"MIT"
] | null | null | null |
workspace/app/webapps/auth/route_login.py
|
r14r/FastAPI_App_Jobboard
|
a5ed77f51d94e45d4b3f3869a10fa0359aaad2f4
|
[
"MIT"
] | null | null | null |
from apis.version1.route_login import login_for_access_token
from db.session import get_db
from fastapi import APIRouter
from fastapi import Depends
from fastapi import HTTPException
from fastapi import Request
from fastapi.templating import Jinja2Templates
from sqlalchemy.orm import Session
from webapps.auth.forms import LoginForm
templates = Jinja2Templates(directory="templates")
router = APIRouter(include_in_schema=False)
@router.get("/login/")
def login(request: Request):
return templates.TemplateResponse("auth/login.html", {"request": request})
@router.post("/login/")
async def login(request: Request, db: Session = Depends(get_db)):
form = LoginForm(request)
await form.load_data()
if await form.is_valid():
try:
form.__dict__.update(msg="Login Successful :)")
response = templates.TemplateResponse("auth/login.html", form.__dict__)
login_for_access_token(response=response, form_data=form, db=db)
return response
except HTTPException:
form.__dict__.update(msg="")
form.__dict__.get("errors").append("Incorrect Email or Password")
return templates.TemplateResponse("auth/login.html", form.__dict__)
return templates.TemplateResponse("auth/login.html", form.__dict__)
| 36.277778
| 83
| 0.730475
|
2ebe9b9b23c6ae62a29610d30f7cf65789efe729
| 4,303
|
py
|
Python
|
youtube8m/utils/metrics/mean_average_precision_calculator.py
|
twoleggedeye/youtube8m
|
c640c5052054ef50eb2f040889a341d3afbdf010
|
[
"MIT"
] | 15
|
2018-09-06T12:52:47.000Z
|
2021-05-28T08:01:45.000Z
|
youtube8m/utils/metrics/mean_average_precision_calculator.py
|
twoleggedeye/youtube8m
|
c640c5052054ef50eb2f040889a341d3afbdf010
|
[
"MIT"
] | null | null | null |
youtube8m/utils/metrics/mean_average_precision_calculator.py
|
twoleggedeye/youtube8m
|
c640c5052054ef50eb2f040889a341d3afbdf010
|
[
"MIT"
] | 2
|
2019-07-11T19:31:19.000Z
|
2020-03-10T04:13:52.000Z
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Calculate the mean average precision.
It provides an interface for calculating mean average precision
for an entire list or the top-n ranked items.
Example usages:
We first call the function accumulate many times to process parts of the ranked
list. After processing all the parts, we call peek_map_at_n
to calculate the mean average precision.
```
import random
import numpy as np
p = np.array([[random.random() for _ in range(1000)] for _ in range(50)])
a = np.array([[random.choice([0, 1]) for _ in range(1000)]
              for _ in range(50)])
# mean average precision for 50 classes.
calculator = mean_average_precision_calculator.MeanAveragePrecisionCalculator(
num_class=50)
calculator.accumulate(p, a)
aps = calculator.peek_map_at_n()
```
"""
from youtube8m.utils.metrics import average_precision_calculator
class MeanAveragePrecisionCalculator(object):
"""This class is to calculate mean average precision.
"""
def __init__(self, num_class):
"""Construct a calculator to calculate the (macro) average precision.
        Args:
            num_class: A positive integer specifying the number of classes.
        Raises:
            ValueError: An error occurred when num_class is not an integer
                greater than 1.
        """
if not isinstance(num_class, int) or num_class <= 1:
raise ValueError("num_class must be a positive integer.")
self._ap_calculators = [] # member of AveragePrecisionCalculator
self._num_class = num_class # total number of classes
for i in range(num_class):
self._ap_calculators.append(
average_precision_calculator.AveragePrecisionCalculator())
def accumulate(self, predictions, actuals, num_positives=None):
"""Accumulate the predictions and their ground truth labels.
Args:
predictions: A list of lists storing the prediction scores. The outer
dimension corresponds to classes.
actuals: A list of lists storing the ground truth labels. The dimensions
should correspond to the predictions input. Any value
larger than 0 will be treated as positives, otherwise as negatives.
num_positives: If provided, it is a list of numbers representing the
number of true positives for each class. If not provided, the number of
true positives will be inferred from the 'actuals' array.
Raises:
ValueError: An error occurred when the shape of predictions and actuals
does not match.
"""
        if not num_positives:
            num_positives = [None] * len(predictions)
calculators = self._ap_calculators
for i in range(len(predictions)):
calculators[i].accumulate(predictions[i], actuals[i], num_positives[i])
def clear(self):
for calculator in self._ap_calculators:
calculator.clear()
def is_empty(self):
return ([calculator.heap_size for calculator in self._ap_calculators] ==
[0 for _ in range(self._num_class)])
def peek_map_at_n(self):
"""Peek the non-interpolated mean average precision at n.
Returns:
An array of non-interpolated average precision at n (default 0) for each
class.
"""
aps = [self._ap_calculators[i].peek_ap_at_n()
for i in range(self._num_class)]
return aps
| 38.419643
| 83
| 0.688125
|
81b896b6dcd70ef155f3a5ab20b0869292f05c0e
| 7,633
|
py
|
Python
|
scripts/optimization viz/opossum_log_to_mp4.py
|
ksteinfe/fresh_eyes
|
db0cdf2a77d5e2df7157c022aa7a620ec15cac34
|
[
"MIT"
] | 4
|
2019-09-20T23:43:21.000Z
|
2021-12-08T15:27:00.000Z
|
scripts/optimization viz/opossum_log_to_mp4.py
|
ksteinfe/fresh_eyes
|
db0cdf2a77d5e2df7157c022aa7a620ec15cac34
|
[
"MIT"
] | null | null | null |
scripts/optimization viz/opossum_log_to_mp4.py
|
ksteinfe/fresh_eyes
|
db0cdf2a77d5e2df7157c022aa7a620ec15cac34
|
[
"MIT"
] | 2
|
2019-09-21T13:56:46.000Z
|
2020-06-28T01:43:02.000Z
|
import os, tempfile, argparse
from PIL import Image, ImageFont, ImageDraw
from zipfile import ZipFile
import imageio
import numpy as np
frames_per_sec = 24
show_local_best = True
LOCAL_BEST_RANGE = frames_per_sec * 3  # window of past iterations, in frames, searched for the local best
FONT = ImageFont.truetype("fonts/OpenSans-Light.ttf", 12)
PAD = 10
SRC_EXT = "jpg"
LAYOUT_STYLE = 0
def main(pth_zip, pth_dst):
pth_mp4 = os.path.join(pth_dst, "{}.mp4".format(os.path.splitext(os.path.split(pth_zip)[1])[0])) # output_filename mimics zip file name
print ("will save an MP4 file to {}".format(pth_mp4))
with tempfile.TemporaryDirectory() as pth_unzip:
print("unzipping {}".format(pth_zip))
with ZipFile(pth_zip, 'r') as zf: zf.extractall(pth_unzip)
print("done unzipping.")
try:
# the first TXT file we come across in the zip file root must be the log file
pth_log = next(os.path.join(pth_unzip, file) for file in os.listdir(pth_unzip) if os.path.isfile(os.path.join(pth_unzip, file)) and file.endswith(".txt"))
print("will use {} as the log file.".format(os.path.basename(pth_log)))
except StopIteration:
print("NO LOG FILE FOUND - are there TXT files in this ZIP file?")
exit()
with open(pth_log) as f: log_data = f.readlines()
log_data = [ln.strip().split(' ') for ln in log_data if ln.strip() != ""]
fitnesses = [float(ln[-1]) for ln in log_data]
timestamps = [ln[1] for ln in log_data]
src_imgs = [os.path.join(pth_unzip,f) for f in os.listdir(pth_unzip) if f.endswith(SRC_EXT)]
viewnames = list(set([os.path.basename(pth).split('-')[-2] for pth in src_imgs])) # this is dependent on proper filename convention
print("Found {} views: {}".format(len(viewnames), viewnames))
rankings = ranking_per_frame(fitnesses)
#for n, rank in enumerate(rankings): print(n, rank)
im_size = Image.open(src_imgs[0]).size
viewframes = {}
for viewname in viewnames:
viewframes[viewname] = viewport_frames(viewname, [pth for pth in src_imgs if viewname in pth], rankings, fitnesses, im_size)
#print(frames_test)
fitness_text_format = "{0:.3%}"
text_size = FONT.getsize(fitness_text_format.format(99))
text_ofst_w = im_size[0]/2.0 - text_size[0]/2.0
text_ofst_h = text_size[1] * 2
font_color = (50,50,50)
frames = []
for n in range(len(rankings)):
fit_crnt = fitnesses[n]
fit_glbl = rankings[n]['fit_glbl']
fit_locl = rankings[n]['fit_locl']
img = False
if LAYOUT_STYLE == 0:
img = Image.new('RGB', (im_size[0]*3 + PAD*4, (im_size[1]*len(viewframes)) + PAD*(len(viewframes)+1) + text_size[1] ) , (255,255,255))
for row, name in enumerate(viewframes):
img.paste(viewframes[name][n],(0,row*(im_size[1])+((row+1)*PAD) ))
draw = ImageDraw.Draw(img)
draw.text(( (0*im_size[0])+(PAD*1)+text_ofst_w , im_size[1]*len(viewframes)+text_ofst_h ),fitness_text_format.format(fit_crnt),font_color,font=FONT)
draw.text(( (1*im_size[0])+(PAD*2)+text_ofst_w , im_size[1]*len(viewframes)+text_ofst_h ),fitness_text_format.format(fit_locl),font_color,font=FONT)
draw.text(( (2*im_size[0])+(PAD*3)+text_ofst_w , im_size[1]*len(viewframes)+text_ofst_h ),fitness_text_format.format(fit_glbl),font_color,font=FONT)
else:
raise NotImplementedError("layout style {} is not ready".format(LAYOUT_STYLE))
if img: frames.append(img)
print("saving mp4 file to {}".format(pth_mp4))
writer = imageio.get_writer(pth_mp4, fps=frames_per_sec)
for im in frames: writer.append_data(np.array(im))
writer.close()
def ranking_per_frame(fits):
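    """For each iteration n, record the index and fitness of the best candidate among iterations 0..n-1 (global) and among the last LOCAL_BEST_RANGE iterations before n (local); iteration 0 falls back to itself."""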
rankings = [{'idx_glbl':0, 'idx_locl':0, 'fit_glbl':fits[0], 'fit_locl':fits[0]}]
for n in range(1,len(fits)):
glb = sorted([(f,i) for i,f in enumerate(fits[:n])])
lcl = sorted([(f,max(0,n-LOCAL_BEST_RANGE) + i) for i,f in enumerate(fits[max(0,n-LOCAL_BEST_RANGE):n])])
rankings.append({'idx_glbl':glb[-1][1], 'idx_locl':lcl[-1][1], 'fit_glbl':glb[-1][0], 'fit_locl':lcl[-1][0]})
return rankings
def viewport_frames(name, pth_imgs, ranks, fits, im_size):
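    """Compose one frame per log entry for a single viewport, pasting the current, local-best and global-best candidate images side by side."""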
if len(pth_imgs) - len(ranks) == 1:
print("There is one more images than there are log entries.\n\tThis is usually because Opossum displays the most fit candidate at the end of the optimization.\n\tWill remove the last image and continue.")
pth_imgs = pth_imgs[:-1]
if len(pth_imgs) != len(ranks):
raise Exception("Number of images for this viewport in ZIP ({}) don't match the number of log entries ({})".format(len(pth_imgs),len(ranks)))
print("cutting frames for {} with {} images and {} log entries".format(name,len(pth_imgs),len(ranks)))
w,h = im_size[0],im_size[1]
fitness_text_format = "{0:.4f}%"
text_size = FONT.getsize(fitness_text_format.format(99))
text_ofst_w = w/2.0 - text_size[0]/2.0
text_ofst_h = text_size[1] * 1.5
font_color = (50,50,50)
frames = []
for n in range(len(ranks)):
img_crnt, fit_crnt = Image.open(pth_imgs[n]), fits[n]
img_glbl, fit_glbl = Image.open(pth_imgs[ranks[n]['idx_glbl']]), ranks[n]['fit_glbl']
img_locl, fit_locl = Image.open(pth_imgs[ranks[n]['idx_locl']]), ranks[n]['fit_locl']
img = False
if LAYOUT_STYLE == 0:
img = Image.new('RGB', ((w+PAD)*3, h), (255,255,255))
img.paste(img_crnt,(PAD,0))
img.paste(img_locl,(w+PAD*2,0))
img.paste(img_glbl,(w*2+PAD*3,0))
#draw = ImageDraw.Draw(img)
#draw.text(( (0*w)+text_ofst_w , h-text_ofst_h ),fitness_text_format.format(fit_crnt),font_color,font=FONT)
#draw.text(( (1*w)+text_ofst_w , h-text_ofst_h ),fitness_text_format.format(fit_locl),font_color,font=FONT)
#draw.text(( (2*w)+text_ofst_w , h-text_ofst_h ),fitness_text_format.format(fit_glbl),font_color,font=FONT)
elif LAYOUT_STYLE == 1:
img = Image.new('RGB', (w, h*3), (255,255,255))
img.paste(img_crnt,(0,0))
img.paste(img_locl,(0,h))
img.paste(img_glbl,(0,h*2))
raise NotImplementedError("not done with vertical style")
else:
raise NotImplementedError("layout style {} is not ready".format(LAYOUT_STYLE))
if img: frames.append(img)
else:
raise ValueError("what happened here? error in creating viewport frame image")
return frames
if __name__ == '__main__' and __package__ is None:
# ---- FEUTIL ---- #
from os import sys, path
sys.path.append(path.dirname(path.dirname(path.abspath(__file__)))) # add grandparent folder to the module search path
import _fresh_eyes_script_utilities as feu # import fresh eyes fe_util
# ---- FEUTIL ---- #
# ---- ARGPARSE ---- #
parser = argparse.ArgumentParser()
parser.add_argument('zip_path', type=feu.files.is_filepath_or_tempfile, help="path at which to find a ZIP file containing images and Opossum log file.")
#parser.add_argument('destination_path', help="path at which to save resulting MP4", nargs='?', default=os.getcwd())
args = parser.parse_args()
# ---- ARGPARSE ---- #
pth_zip = os.path.abspath(args.zip_path)
pth_dst = os.path.dirname(pth_zip)
#print(args)
main(pth_zip, pth_dst)
| 44.9
| 212
| 0.635268
|
dcdda17eaf4447e9a6dfdf8405076df718897cb2
| 88
|
py
|
Python
|
simplemonitor/version.py
|
aikitori/simplemonitor
|
432bdf90d67681f2aed6f51f26546371160f2227
|
[
"BSD-3-Clause"
] | 373
|
2015-12-21T02:39:21.000Z
|
2022-03-08T10:49:43.000Z
|
simplemonitor/version.py
|
aikitori/simplemonitor
|
432bdf90d67681f2aed6f51f26546371160f2227
|
[
"BSD-3-Clause"
] | 910
|
2015-10-13T08:16:38.000Z
|
2022-03-29T12:16:52.000Z
|
simplemonitor/version.py
|
aikitori/simplemonitor
|
432bdf90d67681f2aed6f51f26546371160f2227
|
[
"BSD-3-Clause"
] | 196
|
2015-03-24T19:15:42.000Z
|
2022-02-06T22:39:55.000Z
|
import pkg_resources
VERSION = pkg_resources.get_distribution("simplemonitor").version
| 22
| 65
| 0.852273
|
3030b091442a66a1758237c47ca2d3b1d6082b1e
| 368
|
py
|
Python
|
core/models.py
|
dilshodbekikromov/weatherapp
|
105ca8c7a142a4afdff9bff7c758896f0335610c
|
[
"PostgreSQL"
] | 1
|
2021-12-17T05:17:22.000Z
|
2021-12-17T05:17:22.000Z
|
core/models.py
|
GiovannaK/Weather-app-django
|
3138f03b34baa5866ed63f52dc4b759be6eb7aa4
|
[
"MIT"
] | null | null | null |
core/models.py
|
GiovannaK/Weather-app-django
|
3138f03b34baa5866ed63f52dc4b759be6eb7aa4
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.contrib.auth.models import User
class City(models.Model):
user = models.ManyToManyField(User, default=None,)
name = models.CharField(max_length=255, verbose_name="Nome da cidade")
def __str__(self):
return self.name
class Meta:
verbose_name = "Cidade"
verbose_name_plural = 'Cidades'
| 26.285714
| 74
| 0.701087
|
3dd4834d518a47313b12889c0918b820180957d4
| 2,459
|
py
|
Python
|
ros2_control_demo_bringup/launch/rrbot_system_multi_interface.launch.py
|
ZhanfengZhou/ros2_control_demos
|
1db6634a0ad193b130cc07d66f5baee894658bf0
|
[
"Apache-2.0"
] | null | null | null |
ros2_control_demo_bringup/launch/rrbot_system_multi_interface.launch.py
|
ZhanfengZhou/ros2_control_demos
|
1db6634a0ad193b130cc07d66f5baee894658bf0
|
[
"Apache-2.0"
] | null | null | null |
ros2_control_demo_bringup/launch/rrbot_system_multi_interface.launch.py
|
ZhanfengZhou/ros2_control_demos
|
1db6634a0ad193b130cc07d66f5baee894658bf0
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 Department of Engineering Cybernetics, NTNU.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from launch import LaunchDescription
from launch.actions import DeclareLaunchArgument, IncludeLaunchDescription
from launch.launch_description_sources import PythonLaunchDescriptionSource
from launch.substitutions import LaunchConfiguration, ThisLaunchFileDir
def generate_launch_description():
# Declare arguments
declared_arguments = []
declared_arguments.append(
DeclareLaunchArgument(
"prefix",
default_value='""',
description="Prefix of the joint names, useful for \
multi-robot setup. If changed than also joint names in the controllers' configuration \
have to be updated.",
)
)
declared_arguments.append(
DeclareLaunchArgument(
"slowdown", default_value="50.0", description="Slowdown factor of the RRbot."
)
)
declared_arguments.append(
DeclareLaunchArgument(
"robot_controller",
default_value="forward_velocity_controller",
description="Robot controller to start.",
)
)
# Initialize Arguments
prefix = LaunchConfiguration("prefix")
slowdown = LaunchConfiguration("slowdown")
robot_controller = LaunchConfiguration("robot_controller")
base_launch = IncludeLaunchDescription(
PythonLaunchDescriptionSource([ThisLaunchFileDir(), "/rrbot_base.launch.py"]),
launch_arguments={
"controllers_file": "rrbot_multi_interface_forward_controllers.yaml",
"description_file": "rrbot_system_multi_interface.urdf.xacro",
"prefix": prefix,
"use_fake_hardware": "false",
"fake_sensor_commands": "false",
"slowdown": slowdown,
"robot_controller": robot_controller,
}.items(),
)
return LaunchDescription(declared_arguments + [base_launch])
| 37.830769
| 95
| 0.701911
|
fb1e981a88abdef24c09b78de9ef8f83ed4324c0
| 1,328
|
py
|
Python
|
src/dsa/misc/utils/bit_manipulation.py
|
tvatter/dsa
|
e5ae217e38441d90914a55103e23d86f5821dc2f
|
[
"MIT"
] | null | null | null |
src/dsa/misc/utils/bit_manipulation.py
|
tvatter/dsa
|
e5ae217e38441d90914a55103e23d86f5821dc2f
|
[
"MIT"
] | null | null | null |
src/dsa/misc/utils/bit_manipulation.py
|
tvatter/dsa
|
e5ae217e38441d90914a55103e23d86f5821dc2f
|
[
"MIT"
] | null | null | null |
def get_bit(num, bit):
return (num & (1 << bit)) != 0
def set_bit(num, bit):
return num | (1 << bit)
def clear_bit(num, bit):
return num & ~(1 << bit)
def clear_bits(num, bit, from_msb=True):
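    # from_msb=True clears bit and everything above it; from_msb=False
    # clears bit and everything below it.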
if from_msb:
return num & ~(~0 << bit)
else:
return num & (~0 << (bit + 1))
# def update_bit(num, bit, is_one=True):
# if is_one:
# return set_bit(num, bit)
# else:
# return clear_bit(num, bit)
def update_bit(num, bit, value=1):
mask = ~(1 << bit)
return (num & mask) | (value << bit)
def add(a, b, bits=32):
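    # Ripple-carry addition without '+': XOR forms the partial sum, the
    # shifted AND is the carry; the mask confines the result to 'bits' bits.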
res, carry = a ^ b, (a & b) << 1
mask = ~(~0 << bits)
while (carry & mask) != 0:
res, carry = res ^ carry, (res & carry) << 1
return res & mask if carry > 0 else res
# def substract(a, b, bits=32):
# return add(a, add(~b, 1, bits), bits)
def substract(a, b, bits=32):
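    # Borrow-based subtraction: XOR forms the partial difference and
    # (~a & b) << 1 the borrow; '| ~mask' sign-extends a negative result.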
res, borrow = a ^ b, (~a & b) << 1
mask = ~(~0 << bits)
while (borrow & mask) != 0:
res, borrow = res ^ borrow, (~res & borrow) << 1
return res | ~mask if borrow > 0 else res
def to_bin(num):
num, digit = divmod(num, 2)
res = str(int(digit))
while num > 0:
num, digit = divmod(num, 2)
res += str(digit)
return res[::-1]
def is_power_of_two(num):
return (num & (num - 1)) == 0
def trailing_zeros(num):
return 0 if num == 0 else (num & -num).bit_length() - 1
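if __name__ == '__main__':
    # Illustrative self-checks, not part of the original file.
    assert add(13, 29) == 42
    assert substract(13, 29) == -16
    assert to_bin(6) == '110'
    assert trailing_zeros(40) == 3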
| 19.820896
| 57
| 0.557982
|
26cdd5a09abde63d60a8ab294726b07347f2b90f
| 1,054
|
py
|
Python
|
database/information/KEY_COLUMN_USAGE.py
|
mshobair/invitro_cheminformatics
|
17201496c73453accd440646a1ee81726119a59c
|
[
"MIT"
] | null | null | null |
database/information/KEY_COLUMN_USAGE.py
|
mshobair/invitro_cheminformatics
|
17201496c73453accd440646a1ee81726119a59c
|
[
"MIT"
] | null | null | null |
database/information/KEY_COLUMN_USAGE.py
|
mshobair/invitro_cheminformatics
|
17201496c73453accd440646a1ee81726119a59c
|
[
"MIT"
] | null | null | null |
from database.database_schemas import Schemas
from sqlalchemy import Column, String
from sqlalchemy.dialects.mysql import BIGINT
from database.base import Base
class KeyColumnUsage(Base):
"""Maps to KEY_COLUMN_USAGE table in information databases."""
__tablename__ = 'KEY_COLUMN_USAGE'
__table_args__ = {'schema': Schemas.information_schema}
CONSTRAINT_CATALOG = Column(String, nullable=False)
CONSTRAINT_SCHEMA = Column(String, nullable=False)
CONSTRAINT_NAME = Column(String, nullable=False)
TABLE_CATALOG = Column(String, nullable=False)
TABLE_SCHEMA = Column(String, nullable=False)
TABLE_NAME = Column(String, nullable=False)
COLUMN_NAME = Column(String, nullable=False)
ORDINAL_POSITION = Column(BIGINT, nullable=False, default=0)
POSITION_IN_UNIQUE_CONSTRAINT = Column(BIGINT)
REFERENCED_TABLE_SCHEMA = Column(String)
REFERENCED_TABLE_NAME = Column(String)
REFERENCED_COLUMN_NAME = Column(String)
| 39.037037
| 82
| 0.774194
|
719284a622da8b17f49de3f210bbfab25e7cec98
| 1,116
|
py
|
Python
|
Backend/config/urls.py
|
md-msig/Django-React-Blog-1
|
264c410f9fc31044f125cfc3978e7799eabe0576
|
[
"Apache-2.0"
] | null | null | null |
Backend/config/urls.py
|
md-msig/Django-React-Blog-1
|
264c410f9fc31044f125cfc3978e7799eabe0576
|
[
"Apache-2.0"
] | null | null | null |
Backend/config/urls.py
|
md-msig/Django-React-Blog-1
|
264c410f9fc31044f125cfc3978e7799eabe0576
|
[
"Apache-2.0"
] | null | null | null |
"""Blog URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from v1.accounts.views import FacebookLogin
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    url(r'^blog/api/', include('v1.post.urls'), name='blog'),
    url(r'^accounts/api/', include('v1.accounts.urls')),
    url(r'^accounts/', include('allauth.urls')),
    url(r'^rest-auth/', include('rest_auth.urls')),
    url(r'^rest-auth/facebook/$', FacebookLogin.as_view(), name='fb_login')
]
| 37.2
| 79
| 0.689068
|
76653c09967ad1b9cde868f188f75ec582c81418
| 355
|
py
|
Python
|
12_Nguyen_Lam_Manh_Tuyen/1.4.py
|
lpython2006e/exercies
|
84343eae57d86708a7984aa02f77183a4688a508
|
[
"MIT"
] | null | null | null |
12_Nguyen_Lam_Manh_Tuyen/1.4.py
|
lpython2006e/exercies
|
84343eae57d86708a7984aa02f77183a4688a508
|
[
"MIT"
] | null | null | null |
12_Nguyen_Lam_Manh_Tuyen/1.4.py
|
lpython2006e/exercies
|
84343eae57d86708a7984aa02f77183a4688a508
|
[
"MIT"
] | 8
|
2020-07-10T14:13:54.000Z
|
2020-08-03T08:17:50.000Z
|
# Write a program that asks the user for a number n and prints the sum of the numbers 1 to n
start = 1
print("Please input your number")
end = input()
total = 0  # 'total' instead of 'sum' to avoid shadowing the built-in
while not end.isdigit():
    print("Your input is not a valid number, please try again")
    end = input()
for i in range(start, int(end) + 1):
    total = total + i
print("Sum from 1 to {} is {}".format(end, total))
| 32.272727
| 91
| 0.692958
|
c6dc7cb74aeb684ffd1c1a5ffb4353ea67b30b0c
| 25,965
|
py
|
Python
|
venv/lib/python3.6/site-packages/ansible_collections/fortinet/fortimanager/plugins/modules/fmgr_dvm_cmd_add_device.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 1
|
2020-01-22T13:11:23.000Z
|
2020-01-22T13:11:23.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/fortinet/fortimanager/plugins/modules/fmgr_dvm_cmd_add_device.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 12
|
2020-02-21T07:24:52.000Z
|
2020-04-14T09:54:32.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/fortinet/fortimanager/plugins/modules/fmgr_dvm_cmd_add_device.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
from __future__ import absolute_import, division, print_function
# Copyright 2019-2021 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fmgr_dvm_cmd_add_device
short_description: Add a device to the Device Manager database.
description:
- This module is able to configure a FortiManager device.
- Examples include all parameters and values which need to be adjusted to data sources before usage.
version_added: "2.10"
author:
- Link Zheng (@chillancezen)
- Jie Xue (@JieX19)
- Frank Shen (@fshen01)
- Hongbin Lu (@fgtdev-hblu)
notes:
- Running in workspace locking mode is supported in this FortiManager module, the top
level parameters workspace_locking_adom and workspace_locking_timeout help do the work.
- To create or update an object, use state present directive.
- To delete an object, use state absent directive.
    - Normally, running one module can fail when a non-zero rc is returned. You can also override
      the conditions to fail or succeed with the parameters rc_failed and rc_succeeded.
options:
enable_log:
description: Enable/Disable logging for task
required: false
type: bool
default: false
bypass_validation:
description: only set to True when module schema diffs with FortiManager API structure, module continues to execute without validating parameters
required: false
type: bool
default: false
workspace_locking_adom:
description: the adom to lock for FortiManager running in workspace mode, the value can be global and others including root
required: false
type: str
workspace_locking_timeout:
description: the maximum time in seconds to wait for other user to release the workspace lock
required: false
type: int
default: 300
rc_succeeded:
        description: the rc codes list with which the conditions to succeed will be overridden
type: list
required: false
rc_failed:
        description: the rc codes list with which the conditions to fail will be overridden
type: list
required: false
dvm_cmd_add_device:
description: the top level parameters set
required: false
type: dict
suboptions:
adom:
type: str
                description: 'Name or ID of the ADOM on which the command is to be executed.'
device:
description: no description
type: dict
required: false
suboptions:
adm_pass:
description: no description
type: str
adm_usr:
type: str
description: '<i>add real and promote device</i>.'
desc:
type: str
description: '<i>available for all operations</i>.'
device action:
type: str
description:
- 'Specify add device operations, or leave blank to add real device:'
- '"add_model" - add a model device.'
- '"promote_unreg" - promote an unregistered device to be managed by FortiManager using information from database.'
faz.quota:
type: int
description: '<i>available for all operations</i>.'
ip:
type: str
description: '<i>add real device only</i>. Add device will probe with this IP using the log in credential specified.'
meta fields:
description: no description
type: dict
mgmt_mode:
type: str
description: '<i>add real and model device</i>.'
choices:
- 'unreg'
- 'fmg'
- 'faz'
- 'fmgfaz'
mr:
type: int
description: '<i>add model device only</i>.'
name:
type: str
description: '<i>required for all operations</i>. Unique name for the device.'
os_type:
type: str
description: '<i>add model device only</i>.'
choices:
- 'unknown'
- 'fos'
- 'fsw'
- 'foc'
- 'fml'
- 'faz'
- 'fwb'
- 'fch'
- 'fct'
- 'log'
- 'fmg'
- 'fsa'
- 'fdd'
- 'fac'
- 'fpx'
- 'fna'
os_ver:
type: str
description: 'os version'
choices:
- 'unknown'
- '0.0'
- '1.0'
- '2.0'
- '3.0'
- '4.0'
- '5.0'
- '6.0'
patch:
type: int
description: '<i>add model device only</i>.'
platform_str:
type: str
                        description: '<i>add model device only</i>. Required to determine the platform for VM platforms.'
sn:
type: str
description: '<i>add model device only</i>. This attribute will be used to determine the device platform, except for VM platforms, w...'
flags:
description: no description
type: list
choices:
- none
- create_task
- nonblocking
- log_dev
groups:
description: no description
type: list
suboptions:
name:
type: str
description: no description
vdom:
type: str
description: no description
'''
EXAMPLES = '''
- name: Add a FOS device to FMG
hosts: fortimanager01
gather_facts: no
connection: httpapi
collections:
- fortinet.fortimanager
vars:
ansible_httpapi_use_ssl: True
ansible_httpapi_validate_certs: False
ansible_httpapi_port: 443
fos_user: 'admin'
fos_pass: 'password'
fos_ip: '192.168.190.151'
tasks:
- name: discover device
fmgr_dvm_cmd_discover_device:
bypass_validation: True
dvm_cmd_discover_device:
device:
adm_pass: '{{ fos_pass }}'
adm_usr: '{{ fos_user }}'
ip: '{{ fos_ip }}'
register: probed_device
- name: add device
fmgr_dvm_cmd_add_device:
bypass_validation: True
dvm_cmd_add_device:
adom: 'root'
flags:
- 'create_task'
- 'nonblocking'
device:
adm_usr: '{{ probed_device.meta.response_data.device.adm_usr }}'
adm_pass: '{{ probed_device.meta.response_data.device.adm_pass }}'
desc: 'The device is added via FortiManager Ansible'
ip: '{{ probed_device.meta.response_data.device.ip }}'
mgmt_mode: 'fmg'
name: '{{ probed_device.meta.response_data.device.name }}'
sn: '{{ probed_device.meta.response_data.device.sn }}'
register: installing_task
- name: poll the task
fmgr_fact:
facts:
selector: 'task_task'
params:
task: '{{installing_task.meta.response_data.taskid}}'
register: taskinfo
until: taskinfo.meta.response_data.percent == 100
retries: 30
delay: 5
failed_when: taskinfo.meta.response_data.state == 'error' and 'devsnexist' not in taskinfo.meta.response_data.line[0].detail
- hosts: fortimanager00
collections:
- fortinet.fortimanager
connection: httpapi
vars:
ansible_httpapi_use_ssl: True
ansible_httpapi_validate_certs: False
ansible_httpapi_port: 443
tasks:
- name: Add a device to the Device Manager database.
fmgr_dvm_cmd_add_device:
bypass_validation: False
dvm_cmd_add_device:
adom: ansible
device:
adm_pass: fortinet # device password
adm_usr: admin # device user name
ip: 0.0.0.0 # device ip
mgmt_mode: fmg #<value in [unreg, fmg, faz, ...]>
name: FGT_AWS
flags:
- none
- create_task
- nonblocking
- log_dev
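# A minimal sketch (not one of the shipped examples) of the rc_succeeded
# override described in the notes above; the extra return code below is a
# hypothetical placeholder for a "device already exists" result, not a
# documented FortiManager value.
- hosts: fortimanager00
  collections:
    - fortinet.fortimanager
  connection: httpapi
  tasks:
    - name: Add a device, treating an assumed duplicate-object rc as success
      fmgr_dvm_cmd_add_device:
        rc_succeeded: [0, -10131]  # -10131 is a hypothetical example code
        dvm_cmd_add_device:
          adom: ansible
          device:
            ip: 0.0.0.0
            mgmt_mode: fmg
            name: FGT_EXAMPLE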
'''
RETURN = '''
request_url:
description: The full url requested
returned: always
type: str
sample: /sys/login/user
response_code:
description: The status of api request
returned: always
type: int
sample: 0
response_message:
description: The descriptive message of the api response
type: str
returned: always
sample: OK.
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import NAPIManager
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import check_galaxy_version
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import check_parameter_bypass
def main():
jrpc_urls = [
'/dvm/cmd/add/device'
]
perobject_jrpc_urls = [
'/dvm/cmd/add/device/{device}'
]
url_params = []
module_arg_spec = {
'enable_log': {
'type': 'bool',
'required': False,
'default': False
},
'forticloud_access_token': {
'type': 'str',
'required': False,
'no_log': True
},
'bypass_validation': {
'type': 'bool',
'required': False,
'default': False
},
'workspace_locking_adom': {
'type': 'str',
'required': False
},
'workspace_locking_timeout': {
'type': 'int',
'required': False,
'default': 300
},
'rc_succeeded': {
'required': False,
'type': 'list'
},
'rc_failed': {
'required': False,
'type': 'list'
},
'dvm_cmd_add_device': {
'required': False,
'type': 'dict',
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'options': {
'adom': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'str'
},
'device': {
'required': False,
'type': 'dict',
'options': {
'adm_pass': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'str'
},
'adm_usr': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'str'
},
'desc': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'str'
},
'device action': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'str'
},
'faz.quota': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'int'
},
'ip': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'str'
},
'meta fields': {
'required': False,
'type': 'dict'
},
'mgmt_mode': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'choices': [
'unreg',
'fmg',
'faz',
'fmgfaz'
],
'type': 'str'
},
'mr': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'int'
},
'name': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'str'
},
'os_type': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'choices': [
'unknown',
'fos',
'fsw',
'foc',
'fml',
'faz',
'fwb',
'fch',
'fct',
'log',
'fmg',
'fsa',
'fdd',
'fac',
'fpx',
'fna'
],
'type': 'str'
},
'os_ver': {
'required': False,
'choices': [
'unknown',
'0.0',
'1.0',
'2.0',
'3.0',
'4.0',
'5.0',
'6.0'
],
'type': 'str'
},
'patch': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'int'
},
'platform_str': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'str'
},
'sn': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'str'
}
}
},
'flags': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'list',
'choices': [
'none',
'create_task',
'nonblocking',
'log_dev'
]
},
'groups': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'list',
'options': {
'name': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'str'
},
'vdom': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'str'
}
}
}
}
}
}
params_validation_blob = []
check_galaxy_version(module_arg_spec)
module = AnsibleModule(argument_spec=check_parameter_bypass(module_arg_spec, 'dvm_cmd_add_device'),
supports_check_mode=False)
fmgr = None
if module._socket_path:
connection = Connection(module._socket_path)
connection.set_option('enable_log', module.params['enable_log'] if 'enable_log' in module.params else False)
connection.set_option('forticloud_access_token',
module.params['forticloud_access_token'] if 'forticloud_access_token' in module.params else None)
fmgr = NAPIManager(jrpc_urls, perobject_jrpc_urls, None, url_params, module, connection, top_level_schema_name='data')
fmgr.validate_parameters(params_validation_blob)
fmgr.process_exec(argument_specs=module_arg_spec)
else:
module.fail_json(msg='MUST RUN IN HTTPAPI MODE')
module.exit_json(meta=module.params)
if __name__ == '__main__':
main()
| 37.960526
| 160
| 0.342808
|
90d0d698d28c54365c6b974473404e06d9aff1a7
| 9,228
|
py
|
Python
|
netbox/extras/views.py
|
UrosTodorovic/netbox
|
4a0fd6d230b4d3dd39c2555cf573bf72f824c4bd
|
[
"Apache-2.0"
] | null | null | null |
netbox/extras/views.py
|
UrosTodorovic/netbox
|
4a0fd6d230b4d3dd39c2555cf573bf72f824c4bd
|
[
"Apache-2.0"
] | null | null | null |
netbox/extras/views.py
|
UrosTodorovic/netbox
|
4a0fd6d230b4d3dd39c2555cf573bf72f824c4bd
|
[
"Apache-2.0"
] | 1
|
2021-04-09T06:08:21.000Z
|
2021-04-09T06:08:21.000Z
|
from __future__ import unicode_literals
from django import template
from django.contrib import messages
from django.contrib.auth.mixins import PermissionRequiredMixin
from django.contrib.contenttypes.models import ContentType
from django.db.models import Count, Q
from django.http import Http404
from django.shortcuts import get_object_or_404, redirect, render
from django.utils.safestring import mark_safe
from django.views.generic import View
from taggit.models import Tag
from utilities.forms import ConfirmationForm
from utilities.views import BulkDeleteView, ObjectDeleteView, ObjectEditView, ObjectListView
from . import filters
from .forms import ConfigContextForm, ConfigContextFilterForm, ImageAttachmentForm, ObjectChangeFilterForm, TagForm
from .models import ConfigContext, ImageAttachment, ObjectChange, ReportResult
from .reports import get_report, get_reports
from .tables import ConfigContextTable, ObjectChangeTable, TagTable
#
# Tags
#
class TagListView(ObjectListView):
queryset = Tag.objects.annotate(items=Count('taggit_taggeditem_items')).order_by('name')
table = TagTable
template_name = 'extras/tag_list.html'
class TagEditView(PermissionRequiredMixin, ObjectEditView):
permission_required = 'taggit.change_tag'
model = Tag
model_form = TagForm
default_return_url = 'extras:tag_list'
class TagDeleteView(PermissionRequiredMixin, ObjectDeleteView):
permission_required = 'taggit.delete_tag'
model = Tag
default_return_url = 'extras:tag_list'
class TagBulkDeleteView(PermissionRequiredMixin, BulkDeleteView):
    permission_required = 'taggit.delete_tag'
queryset = Tag.objects.annotate(items=Count('taggit_taggeditem_items')).order_by('name')
table = TagTable
default_return_url = 'extras:tag_list'
#
# Config contexts
#
class ConfigContextListView(ObjectListView):
queryset = ConfigContext.objects.all()
filter = filters.ConfigContextFilter
filter_form = ConfigContextFilterForm
table = ConfigContextTable
template_name = 'extras/configcontext_list.html'
class ConfigContextView(View):
def get(self, request, pk):
configcontext = get_object_or_404(ConfigContext, pk=pk)
return render(request, 'extras/configcontext.html', {
'configcontext': configcontext,
})
class ConfigContextCreateView(PermissionRequiredMixin, ObjectEditView):
permission_required = 'extras.add_configcontext'
model = ConfigContext
model_form = ConfigContextForm
default_return_url = 'extras:configcontext_list'
template_name = 'extras/configcontext_edit.html'
class ConfigContextEditView(ConfigContextCreateView):
permission_required = 'extras.change_configcontext'
class ConfigContextDeleteView(PermissionRequiredMixin, ObjectDeleteView):
permission_required = 'extras.delete_configcontext'
model = ConfigContext
default_return_url = 'extras:configcontext_list'
class ConfigContextBulkDeleteView(PermissionRequiredMixin, BulkDeleteView):
    permission_required = 'extras.delete_configcontext'
queryset = ConfigContext.objects.all()
table = ConfigContextTable
default_return_url = 'extras:configcontext_list'
class ObjectConfigContextView(View):
object_class = None
base_template = None
def get(self, request, pk):
obj = get_object_or_404(self.object_class, pk=pk)
source_contexts = ConfigContext.objects.get_for_object(obj)
return render(request, 'extras/object_configcontext.html', {
self.object_class._meta.model_name: obj,
'rendered_context': obj.get_config_context(),
'source_contexts': source_contexts,
'base_template': self.base_template,
'active_tab': 'config-context',
})
#
# Change logging
#
class ObjectChangeListView(ObjectListView):
queryset = ObjectChange.objects.select_related('user', 'changed_object_type')
filter = filters.ObjectChangeFilter
filter_form = ObjectChangeFilterForm
table = ObjectChangeTable
template_name = 'extras/objectchange_list.html'
class ObjectChangeView(View):
def get(self, request, pk):
objectchange = get_object_or_404(ObjectChange, pk=pk)
related_changes = ObjectChange.objects.filter(request_id=objectchange.request_id).exclude(pk=objectchange.pk)
related_changes_table = ObjectChangeTable(
data=related_changes[:50],
orderable=False
)
return render(request, 'extras/objectchange.html', {
'objectchange': objectchange,
'related_changes_table': related_changes_table,
'related_changes_count': related_changes.count()
})
class ObjectChangeLogView(View):
"""
Present a history of changes made to a particular object.
"""
def get(self, request, model, **kwargs):
        # Get object by model and kwargs (e.g. slug='foo')
obj = get_object_or_404(model, **kwargs)
# Gather all changes for this object (and its related objects)
content_type = ContentType.objects.get_for_model(model)
objectchanges = ObjectChange.objects.select_related(
'user', 'changed_object_type'
).filter(
Q(changed_object_type=content_type, changed_object_id=obj.pk) |
Q(related_object_type=content_type, related_object_id=obj.pk)
)
objectchanges_table = ObjectChangeTable(
data=objectchanges,
orderable=False
)
# Check whether a header template exists for this model
base_template = '{}/{}.html'.format(model._meta.app_label, model._meta.model_name)
try:
template.loader.get_template(base_template)
object_var = model._meta.model_name
except template.TemplateDoesNotExist:
base_template = '_base.html'
object_var = 'obj'
return render(request, 'extras/object_changelog.html', {
object_var: obj,
'objectchanges_table': objectchanges_table,
'base_template': base_template,
'active_tab': 'changelog',
})
#
# Image attachments
#
class ImageAttachmentEditView(PermissionRequiredMixin, ObjectEditView):
permission_required = 'extras.change_imageattachment'
model = ImageAttachment
model_form = ImageAttachmentForm
def alter_obj(self, imageattachment, request, args, kwargs):
if not imageattachment.pk:
# Assign the parent object based on URL kwargs
model = kwargs.get('model')
imageattachment.parent = get_object_or_404(model, pk=kwargs['object_id'])
return imageattachment
def get_return_url(self, request, imageattachment):
return imageattachment.parent.get_absolute_url()
class ImageAttachmentDeleteView(PermissionRequiredMixin, ObjectDeleteView):
permission_required = 'extras.delete_imageattachment'
model = ImageAttachment
def get_return_url(self, request, imageattachment):
return imageattachment.parent.get_absolute_url()
#
# Reports
#
class ReportListView(View):
"""
Retrieve all of the available reports from disk and the recorded ReportResult (if any) for each.
"""
def get(self, request):
reports = get_reports()
results = {r.report: r for r in ReportResult.objects.all()}
ret = []
for module, report_list in reports:
module_reports = []
for report in report_list:
report.result = results.get(report.full_name, None)
module_reports.append(report)
ret.append((module, module_reports))
return render(request, 'extras/report_list.html', {
'reports': ret,
})
class ReportView(View):
"""
Display a single Report and its associated ReportResult (if any).
"""
def get(self, request, name):
# Retrieve the Report by "<module>.<report>"
module_name, report_name = name.split('.')
report = get_report(module_name, report_name)
if report is None:
raise Http404
# Attach the ReportResult (if any)
report.result = ReportResult.objects.filter(report=report.full_name).first()
return render(request, 'extras/report.html', {
'report': report,
'run_form': ConfirmationForm(),
})
class ReportRunView(PermissionRequiredMixin, View):
"""
Run a Report and record a new ReportResult.
"""
permission_required = 'extras.add_reportresult'
def post(self, request, name):
# Retrieve the Report by "<module>.<report>"
module_name, report_name = name.split('.')
report = get_report(module_name, report_name)
if report is None:
raise Http404
form = ConfirmationForm(request.POST)
if form.is_valid():
# Run the Report. A new ReportResult is created.
report.run()
result = 'failed' if report.failed else 'passed'
msg = "Ran report {} ({})".format(report.full_name, result)
messages.success(request, mark_safe(msg))
return redirect('extras:report', name=report.full_name)
| 31.71134
| 117
| 0.700477
|
89a8eb9044a1c9782b8f2f1320323629f4d595bc
| 6,028
|
py
|
Python
|
Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/lms/djangoapps/courseware/tests/test_self_paced_overrides.py
|
osoco/better-ways-of-thinking-about-software
|
83e70d23c873509e22362a09a10d3510e10f6992
|
[
"MIT"
] | 3
|
2021-12-15T04:58:18.000Z
|
2022-02-06T12:15:37.000Z
|
Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/lms/djangoapps/courseware/tests/test_self_paced_overrides.py
|
osoco/better-ways-of-thinking-about-software
|
83e70d23c873509e22362a09a10d3510e10f6992
|
[
"MIT"
] | null | null | null |
Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/lms/djangoapps/courseware/tests/test_self_paced_overrides.py
|
osoco/better-ways-of-thinking-about-software
|
83e70d23c873509e22362a09a10d3510e10f6992
|
[
"MIT"
] | 1
|
2019-01-02T14:38:50.000Z
|
2019-01-02T14:38:50.000Z
|
"""Tests for self-paced course due date overrides."""
import datetime
from unittest.mock import patch
import pytz
from django.test.utils import override_settings
from common.djangoapps.student.tests.factories import BetaTesterFactory
from lms.djangoapps.courseware.access import has_access
from lms.djangoapps.ccx.tests.test_overrides import inject_field_overrides
from lms.djangoapps.courseware.field_overrides import OverrideFieldData, OverrideModulestoreFieldData
from lms.djangoapps.discussion.django_comment_client.utils import get_accessible_discussion_xblocks
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
@override_settings(
XBLOCK_FIELD_DATA_WRAPPERS=['lms.djangoapps.courseware.field_overrides:OverrideModulestoreFieldData.wrap'],
MODULESTORE_FIELD_OVERRIDE_PROVIDERS=[
'lms.djangoapps.courseware.self_paced_overrides.SelfPacedDateOverrideProvider'
],
)
class SelfPacedDateOverrideTest(ModuleStoreTestCase):
"""
Tests for self-paced due date overrides.
"""
def setUp(self):
self.reset_setting_cache_variables()
super().setUp()
self.non_staff_user, __ = self.create_non_staff_user()
self.now = datetime.datetime.now(pytz.UTC).replace(microsecond=0)
self.future = self.now + datetime.timedelta(days=30)
def tearDown(self):
self.reset_setting_cache_variables()
super().tearDown()
def reset_setting_cache_variables(self):
"""
The overridden settings for this class get cached on class variables.
Reset those to None before and after running the test to ensure clean
behavior.
"""
OverrideFieldData.provider_classes = None
OverrideModulestoreFieldData.provider_classes = None
def setup_course(self, **course_kwargs):
"""Set up a course with provided course attributes.
Creates a child block with a due date, and ensures that field
overrides are correctly applied for both blocks.
"""
course = CourseFactory.create(**course_kwargs)
section = ItemFactory.create(parent=course, due=self.now)
inject_field_overrides((course, section), course, self.user)
return (course, section)
def create_discussion_xblocks(self, parent): # lint-amnesty, pylint: disable=missing-function-docstring
# Create a released discussion xblock
ItemFactory.create(
parent=parent,
category='discussion',
display_name='released',
start=self.now,
)
# Create a scheduled discussion xblock
ItemFactory.create(
parent=parent,
category='discussion',
display_name='scheduled',
start=self.future,
)
def test_instructor_paced_due_date(self):
__, ip_section = self.setup_course(display_name="Instructor Paced Course", self_paced=False)
assert ip_section.due == self.now
def test_self_paced_due_date(self):
__, sp_section = self.setup_course(display_name="Self-Paced Course", self_paced=True)
assert sp_section.due is None
@patch.dict('lms.djangoapps.courseware.access.settings.FEATURES', {'DISABLE_START_DATES': False})
def test_course_access_to_beta_users(self):
"""
Test that beta testers can access `self_paced` course prior to start date.
"""
now = datetime.datetime.now(pytz.UTC)
one_month_from_now = now + datetime.timedelta(days=30)
course_options = {
'days_early_for_beta': 100,
'self_paced': True,
'start': one_month_from_now,
}
        # Create a `self_paced` course and add a beta tester to it
self_paced_course, self_paced_section = self.setup_course(**course_options)
beta_tester = BetaTesterFactory(course_key=self_paced_course.id)
        # Verify the course is `self_paced` and that the course has a start date but the section does not.
assert self_paced_course.self_paced
assert self_paced_course.start == one_month_from_now
assert self_paced_section.start is None
        # Verify that non-staff users do not have access to the course
assert not has_access(self.non_staff_user, 'load', self_paced_course)
# Verify beta tester can access the course as well as the course sections
assert has_access(beta_tester, 'load', self_paced_course)
assert has_access(beta_tester, 'load', self_paced_section, self_paced_course.id)
@patch.dict('lms.djangoapps.courseware.access.settings.FEATURES', {'DISABLE_START_DATES': False})
def test_instructor_paced_discussion_xblock_visibility(self):
"""
Verify that discussion xblocks scheduled for release in the future are
not visible to students in an instructor-paced course.
"""
course, section = self.setup_course(start=self.now, self_paced=False)
self.create_discussion_xblocks(section)
# Only the released xblocks should be visible when the course is instructor-paced.
xblocks = get_accessible_discussion_xblocks(course, self.non_staff_user)
assert all((xblock.display_name == 'released') for xblock in xblocks)
@patch.dict('lms.djangoapps.courseware.access.settings.FEATURES', {'DISABLE_START_DATES': False})
def test_self_paced_discussion_xblock_visibility(self):
"""
Regression test. Verify that discussion xblocks scheduled for release
in the future are visible to students in a self-paced course.
"""
course, section = self.setup_course(start=self.now, self_paced=True)
self.create_discussion_xblocks(section)
# The scheduled xblocks should be visible when the course is self-paced.
xblocks = get_accessible_discussion_xblocks(course, self.non_staff_user)
assert len(xblocks) == 2
assert any((xblock.display_name == 'scheduled') for xblock in xblocks)
| 43.057143
| 111
| 0.712674
|
22b38406620e76a841e21ded6117cd8f7eaac5e9
| 1,548
|
py
|
Python
|
Languages/Python/calculator.py
|
ayushverma0028/Hacktober2021
|
379832be8adb3a58e87142bad46bb4d0e93942cd
|
[
"MIT"
] | 1
|
2022-02-01T20:59:35.000Z
|
2022-02-01T20:59:35.000Z
|
Languages/Python/calculator.py
|
ayushverma0028/Hacktober2021
|
379832be8adb3a58e87142bad46bb4d0e93942cd
|
[
"MIT"
] | null | null | null |
Languages/Python/calculator.py
|
ayushverma0028/Hacktober2021
|
379832be8adb3a58e87142bad46bb4d0e93942cd
|
[
"MIT"
] | null | null | null |
# Simple Calculator Python
# Enter Choice
# 1.Add 2.Subtract 3. Multiply 4.Divide ('1', '2', '3', '4')
# Enter Two Values
# Get Answer
# Next Calculation : (y/n)
# ----------------------------------
# This function adds two numbers
def add(x, y):
return x + y
# This function subtracts two numbers
def subtract(x, y):
return x - y
# This function multiplies two numbers
def multiply(x, y):
return x * y
# This function divides two numbers
def divide(x, y):
return x / y
print("Select operation.")
print("1.Add")
print("2.Subtract")
print("3.Multiply")
print("4.Divide")
while True:
# take input from the user
choice = input("Enter choice(1/2/3/4): ")
# check if choice is one of the four options
if choice in ('1', '2', '3', '4'):
num1 = float(input("Enter first number: "))
num2 = float(input("Enter second number: "))
if choice == '1':
print(num1, "+", num2, "=", add(num1, num2))
elif choice == '2':
print(num1, "-", num2, "=", subtract(num1, num2))
elif choice == '3':
print(num1, "*", num2, "=", multiply(num1, num2))
elif choice == '4':
print(num1, "/", num2, "=", divide(num1, num2))
# check if user wants another calculation
# break the while loop if answer is no
        next_calculation = input("Let's do next calculation? (y/n): ")
        if next_calculation.lower() in ("n", "no"):
            break
else:
print("Invalid Input")
# ----------------------------------
| 24.571429
| 70
| 0.543282
|
4116d90fa6182401e94186ac528e167141501129
| 800
|
py
|
Python
|
dev_tools/__init__.py
|
jlmayfield/Cirq
|
dc1294f54118a9a4f92546ca13780b91615dd675
|
[
"Apache-2.0"
] | 3,326
|
2018-07-18T23:17:21.000Z
|
2022-03-29T22:28:24.000Z
|
dev_tools/__init__.py
|
jlmayfield/Cirq
|
dc1294f54118a9a4f92546ca13780b91615dd675
|
[
"Apache-2.0"
] | 3,443
|
2018-07-18T21:07:28.000Z
|
2022-03-31T20:23:21.000Z
|
dev_tools/__init__.py
|
jlmayfield/Cirq
|
dc1294f54118a9a4f92546ca13780b91615dd675
|
[
"Apache-2.0"
] | 865
|
2018-07-18T23:30:24.000Z
|
2022-03-30T11:43:23.000Z
|
# Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tooling useful when developing (e.g. scripts to run tests).
These tools use shell commands, and so are not portable between operating
systems. Currently they assume that the system is based on Debian Linux.
"""
| 40
| 74
| 0.76625
|
025801b898e3aea80be11e1b1ffa6e05613ad3ae
| 2,986
|
py
|
Python
|
source/scripts/aggregatePhyloGenotypeDetailFiles.py
|
emilydolson/interpreting_the_tape_of_life
|
48fd46f3cd2a7f07ce4ea1fd6fa120b520e34925
|
[
"MIT"
] | null | null | null |
source/scripts/aggregatePhyloGenotypeDetailFiles.py
|
emilydolson/interpreting_the_tape_of_life
|
48fd46f3cd2a7f07ce4ea1fd6fa120b520e34925
|
[
"MIT"
] | null | null | null |
source/scripts/aggregatePhyloGenotypeDetailFiles.py
|
emilydolson/interpreting_the_tape_of_life
|
48fd46f3cd2a7f07ce4ea1fd6fa120b520e34925
|
[
"MIT"
] | null | null | null |
'''
Script to build a bank of treatment, run_id, sequence, gestation time, and task profile rows from all phylogeny_sequence_details files in a given data directory.
'''
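# Usage (writes genotype_bank.csv to the current working directory):
#   python aggregatePhyloGenotypeDetailFiles.py <data_directory>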
import argparse, errno, os
tasks = ["not", "nand", "and", "ornot", "or", "andnot", "nor", "xor", "equals"]
def ParseDetailFile(detail_fpath):
"""
Given file pointer to detail file, extract information into form below:
return [{"detail":value, "detail":value, ...}, ...]
"""
orgs = []
with open(detail_fpath, "r") as detail_fp:
######################
# Step 1) Build Legend
###
# Travel to the legend.
for line in detail_fp:
if line == "# Legend:\n": break
# Consume the legend.
details = []
for line in detail_fp:
if line == "\n": break
details.append(line.split(":")[-1].strip())
######################
# Step 2) Consume Organisms
###
for line in detail_fp:
org_dets = line.strip().split(" ")
org = {details[i].lower():org_dets[i] for i in range(0, len(org_dets))}
orgs.append(org)
return orgs
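# For illustration only -- a hypothetical detail file in the shape that
# ParseDetailFile expects (the field names here are assumptions, not copied
# from a real output file):
#
#   # Legend:
#   # 1: Genome Sequence
#   # 2: Gestation Time
#
#   wzcagc 382
#
# which would parse to [{"genome sequence": "wzcagc", "gestation time": "382"}].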
def mkdir_p(path):
"""
This is functionally equivalent to the mkdir -p [fname] bash command
"""
try:
os.makedirs(path)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else: raise
def main():
parser = argparse.ArgumentParser(description="Data aggregation script.")
parser.add_argument("where_all_the_detail_things_are", type=str, help="Target experiment directory.")
args = parser.parse_args()
data_directory = args.where_all_the_detail_things_are
# Grab a list of treatments in data directory
detail_files = [f for f in os.listdir(data_directory) if "phylogeny_sequence_details" in f]
# print("Detail files found: " + str(detail_files))
bank_content = "treatment,run_id,sequence,gestation_time,task_profile," + ",".join(tasks) + "\n"
for detail_file in detail_files:
print("Pulling sequences from: {}".format(detail_file))
detail_path = os.path.join(data_directory, detail_file)
details = ParseDetailFile(detail_path)
treatment = detail_file.split("__")[-1].split("-")[0]
run_id = detail_file.split("__")[-1].split("-")[-1].split(".")[0]
for i in range(0, len(details)):
gestation_time = details[i]["gestation time"]
sequence = details[i]["genome sequence"]
task_profile = "-".join([task for task in tasks if details[i][task] == "1"])
task_performance = [details[i][task] for task in tasks]
bank_content += ",".join(map(str, [treatment,run_id,sequence,gestation_time,task_profile])) + "," + ",".join(map(str,task_performance)) + "\n"
with open("genotype_bank.csv", "w") as fp:
fp.write(bank_content)
if __name__ == "__main__":
main()
| 38.779221
| 154
| 0.600134
|
5ed7f241755027fb15facd4f2792c4695705498d
| 3,258
|
py
|
Python
|
components/aws/sagemaker/model/src/create_model.py
|
kevinbache/pipelines
|
02f5bc08ffb17ab955d497fde6fbbc1a5d6fcde6
|
[
"Apache-2.0"
] | 3
|
2019-09-18T01:33:27.000Z
|
2021-06-09T08:19:10.000Z
|
components/aws/sagemaker/model/src/create_model.py
|
kevinbache/pipelines
|
02f5bc08ffb17ab955d497fde6fbbc1a5d6fcde6
|
[
"Apache-2.0"
] | 12
|
2020-09-26T01:03:09.000Z
|
2022-03-03T23:12:24.000Z
|
components/aws/sagemaker/model/src/create_model.py
|
kevinbache/pipelines
|
02f5bc08ffb17ab955d497fde6fbbc1a5d6fcde6
|
[
"Apache-2.0"
] | 5
|
2020-08-31T08:48:46.000Z
|
2021-11-26T07:29:04.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import logging
from common import _utils
def main(argv=None):
  parser = argparse.ArgumentParser(description='SageMaker Model Creation')
parser.add_argument('--region', type=str.strip, required=True, help='The region where the cluster launches.')
parser.add_argument('--model_name', type=str.strip, required=True, help='The name of the new model.')
parser.add_argument('--role', type=str.strip, required=True, help='The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf.')
parser.add_argument('--container_host_name', type=str.strip, required=False, help='When a ContainerDefinition is part of an inference pipeline, this value uniquely identifies the container for the purposes of logging and metrics.', default='')
parser.add_argument('--image', type=str.strip, required=False, help='The Amazon EC2 Container Registry (Amazon ECR) path where inference code is stored.', default='')
parser.add_argument('--model_artifact_url', type=str.strip, required=False, help='S3 path where Amazon SageMaker to store the model artifacts.', default='')
parser.add_argument('--environment', type=_utils.str_to_json_dict, required=False, help='The dictionary of the environment variables to set in the Docker container. Up to 16 key-value entries in the map.', default='{}')
parser.add_argument('--model_package', type=str.strip, required=False, help='The name or Amazon Resource Name (ARN) of the model package to use to create the model.', default='')
parser.add_argument('--secondary_containers', type=_utils.str_to_json_list, required=False, help='A list of dicts that specifies the additional containers in the inference pipeline.', default='{}')
parser.add_argument('--vpc_security_group_ids', type=str.strip, required=False, help='The VPC security group IDs, in the form sg-xxxxxxxx.', default='')
parser.add_argument('--vpc_subnets', type=str.strip, required=False, help='The ID of the subnets in the VPC to which you want to connect your hpo job.', default='')
parser.add_argument('--network_isolation', type=_utils.str_to_bool, required=False, help='Isolates the training container.', default=True)
parser.add_argument('--tags', type=_utils.str_to_json_dict, required=False, help='An array of key-value pairs, to categorize AWS resources.', default='{}')
args = parser.parse_args()
logging.getLogger().setLevel(logging.INFO)
client = _utils.get_client(args.region)
logging.info('Submitting model creation request to SageMaker...')
_utils.create_model(client, vars(args))
logging.info('Model creation completed.')
with open('/tmp/model_name.txt', 'w') as f:
f.write(args.model_name)
if __name__ == "__main__":
main()
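# Example invocation (hypothetical values; --region, --model_name and --role
# are the required flags, --image is optional):
#   python create_model.py --region us-west-2 --model_name my-model \
#     --role arn:aws:iam::111122223333:role/SageMakerRole \
#     --image 111122223333.dkr.ecr.us-west-2.amazonaws.com/my-image:latest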
| 67.875
| 245
| 0.760589
|
2f929dde9f591cec996faeda2cd7a602136c64af
| 751
|
py
|
Python
|
gcp-bigtable/tests/conftest.py
|
tsungchih/learning-gcp
|
30890c5042a89767ece7d0087152b87aa529130c
|
[
"Apache-2.0"
] | null | null | null |
gcp-bigtable/tests/conftest.py
|
tsungchih/learning-gcp
|
30890c5042a89767ece7d0087152b87aa529130c
|
[
"Apache-2.0"
] | null | null | null |
gcp-bigtable/tests/conftest.py
|
tsungchih/learning-gcp
|
30890c5042a89767ece7d0087152b87aa529130c
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
import pytest
def pytest_addoption(parser):
parser.addoption(
"--projectid",
action="store",
help="The project ID on GCP.")
parser.addoption(
"--instanceid",
action="store",
help="The instance ID of Bigtable on GCP.")
parser.addoption(
"--tablename",
action="store",
help="The table name in Bigtable on GCP."
)
@pytest.fixture(scope="class")
def projectid(request):
return request.config.getoption("--projectid")
@pytest.fixture(scope="class")
def instanceid(request):
return request.config.getoption("--instanceid")
@pytest.fixture(scope="class")
def tablename(request):
return request.config.getoption("--tablename")
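# Example invocation (the option values below are placeholders):
#   pytest --projectid=my-gcp-project --instanceid=my-bigtable-instance --tablename=my-table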
| 21.457143
| 51
| 0.640479
|
82c49bc26a352bdde2cfcbaa57e1408945920896
| 2,384
|
py
|
Python
|
test_library/users/tests/test_views.py
|
fr019/test_library
|
39fa1086b498194e30548c7abe444b346d827ac8
|
[
"MIT"
] | null | null | null |
test_library/users/tests/test_views.py
|
fr019/test_library
|
39fa1086b498194e30548c7abe444b346d827ac8
|
[
"MIT"
] | null | null | null |
test_library/users/tests/test_views.py
|
fr019/test_library
|
39fa1086b498194e30548c7abe444b346d827ac8
|
[
"MIT"
] | null | null | null |
import pytest
from django.contrib.auth.models import AnonymousUser
from django.http.response import Http404
from django.test import RequestFactory
from test_library.users.models import User
from test_library.users.tests.factories import UserFactory
from test_library.users.views import (
UserRedirectView,
UserUpdateView,
user_detail_view,
)
pytestmark = pytest.mark.django_db
class TestUserUpdateView:
"""
TODO:
extracting view initialization code as class-scoped fixture
would be great if only pytest-django supported non-function-scoped
fixture db access -- this is a work-in-progress for now:
https://github.com/pytest-dev/pytest-django/pull/258
"""
def test_get_success_url(self, user: User, rf: RequestFactory):
view = UserUpdateView()
request = rf.get("/fake-url/")
request.user = user
view.request = request
assert view.get_success_url() == f"/users/{user.username}/"
def test_get_object(self, user: User, rf: RequestFactory):
view = UserUpdateView()
request = rf.get("/fake-url/")
request.user = user
view.request = request
assert view.get_object() == user
class TestUserRedirectView:
def test_get_redirect_url(self, user: User, rf: RequestFactory):
view = UserRedirectView()
request = rf.get("/fake-url")
request.user = user
view.request = request
assert view.get_redirect_url() == f"/users/{user.username}/"
class TestUserDetailView:
def test_authenticated(self, user: User, rf: RequestFactory):
request = rf.get("/fake-url/")
request.user = UserFactory()
response = user_detail_view(request, username=user.username)
assert response.status_code == 200
def test_not_authenticated(self, user: User, rf: RequestFactory):
request = rf.get("/fake-url/")
request.user = AnonymousUser()
response = user_detail_view(request, username=user.username)
assert response.status_code == 302
assert response.url == "/accounts/login/?next=/fake-url/"
def test_case_sensitivity(self, rf: RequestFactory):
request = rf.get("/fake-url/")
request.user = UserFactory(username="UserName")
with pytest.raises(Http404):
user_detail_view(request, username="username")
| 29.8
| 74
| 0.675336
|
a304065bcb143e8c672c2c9ff5cce350cc38e0c7
| 1,428
|
py
|
Python
|
renewable_energy_analysis/cleaning/gdp.py
|
Batto1300/renewable-energy-analysis
|
0825f024779a9469ac46175a80069f2e3ae0943a
|
[
"MIT"
] | 1
|
2018-11-25T14:46:21.000Z
|
2018-11-25T14:46:21.000Z
|
renewable_energy_analysis/cleaning/gdp.py
|
Batto1300/renewable-energy-analysis
|
0825f024779a9469ac46175a80069f2e3ae0943a
|
[
"MIT"
] | 4
|
2018-10-10T14:48:54.000Z
|
2018-11-20T12:10:30.000Z
|
renewable_energy_analysis/cleaning/gdp.py
|
Batto1300/renewable-energy-analysis
|
0825f024779a9469ac46175a80069f2e3ae0943a
|
[
"MIT"
] | 1
|
2018-12-18T06:38:08.000Z
|
2018-12-18T06:38:08.000Z
|
import pandas as pd
import file_names as fn
# take absolute, non-OS-dependent paths from file_names
GDP_PATH = fn.OriginalPaths.GDP
C_GDP_PATH = fn.CleanedPaths.GDP
YEARS_PATH = fn.CleanedPaths.YEARS
COUNTRIES_PATH = fn.CleanedPaths.COUNTRIES
# define hardcoded strings
IND_COL_NAME = 'IndicatorName'
GDP_IND_VALUE = 'Gross Domestic Product (GDP)'
COUNTRY_COLUMN_NAME = 'Country'
YEARS_COLUMN_NAME = 'years'
# open files as dataframes
gdp_df = pd.read_csv(GDP_PATH, skiprows=2)
# read years skipping the first row
with open(YEARS_PATH) as years_file:
    years_list = years_file.readlines()[1:]
# remove the trailing newline character from every value in the list
years_list = [line[:-1] for line in years_list]
countries_df = pd.read_csv(COUNTRIES_PATH)
# select values referring to GDP only
gdp_df = gdp_df[gdp_df[IND_COL_NAME] == GDP_IND_VALUE]
# rename countries
gdp_df = gdp_df.rename(columns={COUNTRY_COLUMN_NAME:COUNTRY_COLUMN_NAME.lower()})
# drop indicator column (every row has the same value now)
gdp_df = gdp_df.drop([IND_COL_NAME], axis=1)
# join used as a filter operation with countries and years dataframes
gdp_df = pd.merge(gdp_df,countries_df, on=COUNTRY_COLUMN_NAME.lower())
# filter out years setting the index so to filter only the columns
gdp_df = gdp_df.set_index(COUNTRY_COLUMN_NAME.lower())
gdp_df = gdp_df[years_list]
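# the raw values contain dots (presumably thousands separators); strip them so
# the figures can be parsed as plain numbers downstream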
gdp_df = gdp_df.applymap(lambda x: x.replace(".",""))
# save the resulting dataframe
gdp_df.to_csv(C_GDP_PATH)
| 38.594595
| 81
| 0.787115
|
183c0828160314b2e508721fde5f3001ae274c48
| 244
|
py
|
Python
|
02.Sort/NY/B1181.py
|
SP2021-2/Algorithm
|
2e629eb5234212fad8bbc11491aad068e5783780
|
[
"MIT"
] | 1
|
2021-11-21T06:03:06.000Z
|
2021-11-21T06:03:06.000Z
|
02.Sort/NY/B1181.py
|
SP2021-2/Algorithm
|
2e629eb5234212fad8bbc11491aad068e5783780
|
[
"MIT"
] | 2
|
2021-10-13T07:21:09.000Z
|
2021-11-14T13:53:08.000Z
|
02.Sort/NY/B1181.py
|
SP2021-2/Algorithm
|
2e629eb5234212fad8bbc11491aad068e5783780
|
[
"MIT"
] | null | null | null |
# Baekjoon 1181 - Word Sort
# Notes
# - Is there a more efficient way? Hmm
N = int(input())
words = []
for i in range (N) :
word = input()
words.append((len(word), word))
words = list(set(words))
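# Sorting (length, word) tuples orders by length first, then lexicographically,
# which is exactly the order the problem asks for.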
words.sort()
for i in range (len(words)) :
print(words[i][1])
| 13.555556
| 35
| 0.561475
|
e4bcccdc3d300dc82a19d549615d703190c2907a
| 160
|
py
|
Python
|
swagger_server/models/__init__.py
|
DITAS-Project/data-utility-evaluator
|
5333454652d16b619239a9cd4d9b3fec3476710e
|
[
"Apache-2.0"
] | null | null | null |
swagger_server/models/__init__.py
|
DITAS-Project/data-utility-evaluator
|
5333454652d16b619239a9cd4d9b3fec3476710e
|
[
"Apache-2.0"
] | null | null | null |
swagger_server/models/__init__.py
|
DITAS-Project/data-utility-evaluator
|
5333454652d16b619239a9cd4d9b3fec3476710e
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
# flake8: noqa
from __future__ import absolute_import
# import models into model package
from swagger_server.models.blueprint import Blueprint
| 22.857143
| 53
| 0.81875
|
222b9b8c3fefe22864412328148ff19cde612d11
| 4,096
|
py
|
Python
|
setup.py
|
Manikantan22/incubator-superset
|
ec325c871e60ae2a050aae595b430d6fc2888d1a
|
[
"Apache-2.0"
] | 1
|
2019-09-12T03:49:22.000Z
|
2019-09-12T03:49:22.000Z
|
setup.py
|
Manikantan22/incubator-superset
|
ec325c871e60ae2a050aae595b430d6fc2888d1a
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
Manikantan22/incubator-superset
|
ec325c871e60ae2a050aae595b430d6fc2888d1a
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import io
import json
import os
import subprocess
import sys
from setuptools import find_packages, setup
if sys.version_info < (3, 6):
sys.exit("Sorry, Python < 3.6 is not supported")
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
PACKAGE_JSON = os.path.join(BASE_DIR, "superset", "assets", "package.json")
with open(PACKAGE_JSON, "r") as package_file:
version_string = json.load(package_file)["version"]
with io.open("README.md", "r", encoding="utf-8") as f:
long_description = f.read()
def get_git_sha():
try:
s = subprocess.check_output(["git", "rev-parse", "HEAD"])
return s.decode().strip()
except Exception:
return ""
GIT_SHA = get_git_sha()
version_info = {"GIT_SHA": GIT_SHA, "version": version_string}
print("-==-" * 15)
print("VERSION: " + version_string)
print("GIT SHA: " + GIT_SHA)
print("-==-" * 15)
VERSION_INFO_FILE = os.path.join(
BASE_DIR, "superset", "static", "assets", "version_info.json"
)
with open(VERSION_INFO_FILE, "w") as version_file:
json.dump(version_info, version_file)
setup(
name="apache-superset",
description=("A modern, enterprise-ready business intelligence web application"),
long_description=long_description,
long_description_content_type="text/markdown",
version=version_string,
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=["superset/bin/superset"],
install_requires=[
"backoff>=1.8.0",
"bleach>=3.0.2, <4.0.0",
"celery>=4.3.0, <5.0.0",
"click>=6.0, <7.0.0", # `click`>=7 forces "-" instead of "_"
"colorama",
"contextlib2",
"croniter>=0.3.28",
"cryptography>=2.4.2",
"flask>=1.1.0, <2.0.0",
"flask-appbuilder>=2.1.13, <2.3.0",
"flask-caching",
"flask-compress",
"flask-talisman",
"flask-migrate",
"flask-wtf",
"geopy",
"gunicorn<19.9.0", # deprecated
"humanize",
"isodate",
"markdown>=3.0",
"msgpack>=0.6.1, <0.7.0",
"pandas>=0.24.2, <0.25.0",
"parsedatetime",
"pathlib2",
"polyline",
"python-dateutil",
"python-dotenv",
"python-geohash",
"pyarrow>=0.14.1, <0.15.0",
"pyyaml>=5.1",
"retry>=0.9.2",
"selenium>=3.141.0",
"simplejson>=3.15.0",
"sqlalchemy>=1.3.5,<2.0",
"sqlalchemy-utils>=0.33.2",
"sqlparse>=0.3.0,<0.4",
"wtforms-json",
],
extras_require={
"bigquery": ["pybigquery>=0.4.10", "pandas_gbq>=0.10.0"],
"cors": ["flask-cors>=2.0.0"],
"gsheets": ["gsheetsdb>=0.1.9"],
"hive": ["pyhive[hive]>=0.6.1", "tableschema", "thrift>=0.11.0, <1.0.0"],
"mysql": ["mysqlclient==1.4.2.post1"],
"postgres": ["psycopg2-binary==2.7.5"],
"presto": ["pyhive[presto]>=0.4.0"],
"druid": ["pydruid==0.5.2", "requests==2.22.0"],
},
python_requires="~=3.6",
author="Apache Software Foundation",
author_email="dev@superset.incubator.apache.org",
url="https://superset.apache.org/",
download_url=(
"https://dist.apache.org/repos/dist/release/superset/" + version_string
),
classifiers=["Programming Language :: Python :: 3.6"],
)
| 31.751938
| 85
| 0.617432
|
14178a0c23ab75d878d0ed16af702b77229abc9f
| 4,199
|
py
|
Python
|
lnbits/extensions/withdraw/crud.py
|
taxmeifyoucan/lnbits
|
19ae1ddf0d50b507135c418af9d5becc336d5ce3
|
[
"MIT"
] | 258
|
2020-04-27T21:36:21.000Z
|
2021-10-30T23:24:48.000Z
|
lnbits/extensions/withdraw/crud.py
|
taxmeifyoucan/lnbits
|
19ae1ddf0d50b507135c418af9d5becc336d5ce3
|
[
"MIT"
] | 283
|
2020-04-27T17:23:12.000Z
|
2021-11-01T10:07:20.000Z
|
lnbits/extensions/withdraw/crud.py
|
taxmeifyoucan/lnbits
|
19ae1ddf0d50b507135c418af9d5becc336d5ce3
|
[
"MIT"
] | 109
|
2020-04-28T06:00:17.000Z
|
2021-10-13T02:48:28.000Z
|
from datetime import datetime
from typing import List, Optional, Union
from lnbits.helpers import urlsafe_short_hash
from . import db
from .models import WithdrawLink
async def create_withdraw_link(
*,
wallet_id: str,
title: str,
min_withdrawable: int,
max_withdrawable: int,
uses: int,
wait_time: int,
is_unique: bool,
usescsv: str,
) -> WithdrawLink:
link_id = urlsafe_short_hash()
await db.execute(
"""
INSERT INTO withdraw.withdraw_link (
id,
wallet,
title,
min_withdrawable,
max_withdrawable,
uses,
wait_time,
is_unique,
unique_hash,
k1,
open_time,
usescsv
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
(
link_id,
wallet_id,
title,
min_withdrawable,
max_withdrawable,
uses,
wait_time,
int(is_unique),
urlsafe_short_hash(),
urlsafe_short_hash(),
int(datetime.now().timestamp()) + wait_time,
usescsv,
),
)
link = await get_withdraw_link(link_id, 0)
assert link, "Newly created link couldn't be retrieved"
return link
async def get_withdraw_link(link_id: str, num=0) -> Optional[WithdrawLink]:
row = await db.fetchone(
"SELECT * FROM withdraw.withdraw_link WHERE id = ?", (link_id,)
)
if not row:
return None
link = []
for item in row:
link.append(item)
link.append(num)
return WithdrawLink._make(link)
async def get_withdraw_link_by_hash(unique_hash: str, num=0) -> Optional[WithdrawLink]:
row = await db.fetchone(
"SELECT * FROM withdraw.withdraw_link WHERE unique_hash = ?", (unique_hash,)
)
if not row:
return None
link = []
for item in row:
link.append(item)
link.append(num)
return WithdrawLink._make(link)
async def get_withdraw_links(wallet_ids: Union[str, List[str]]) -> List[WithdrawLink]:
if isinstance(wallet_ids, str):
wallet_ids = [wallet_ids]
q = ",".join(["?"] * len(wallet_ids))
rows = await db.fetchall(
f"SELECT * FROM withdraw.withdraw_link WHERE wallet IN ({q})", (*wallet_ids,)
)
return [WithdrawLink.from_row(row) for row in rows]
async def update_withdraw_link(link_id: str, **kwargs) -> Optional[WithdrawLink]:
if "is_unique" in kwargs:
kwargs["is_unique"] = int(kwargs["is_unique"])
q = ", ".join([f"{field[0]} = ?" for field in kwargs.items()])
await db.execute(
f"UPDATE withdraw.withdraw_link SET {q} WHERE id = ?",
(*kwargs.values(), link_id),
)
row = await db.fetchone(
"SELECT * FROM withdraw.withdraw_link WHERE id = ?", (link_id,)
)
return WithdrawLink.from_row(row) if row else None
async def delete_withdraw_link(link_id: str) -> None:
await db.execute("DELETE FROM withdraw.withdraw_link WHERE id = ?", (link_id,))
def chunks(lst, n):
    """Yield successive n-sized chunks from lst."""
    for i in range(0, len(lst), n):
        yield lst[i : i + n]
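# For illustration: list(chunks([1, 2, 3, 4, 5], 2)) -> [[1, 2], [3, 4], [5]]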
async def create_hash_check(
the_hash: str,
lnurl_id: str,
) -> dict:
await db.execute(
"""
INSERT INTO withdraw.hash_check (
id,
lnurl_id
)
VALUES (?, ?)
""",
(
the_hash,
lnurl_id,
),
)
hashCheck = await get_hash_check(the_hash, lnurl_id)
return hashCheck
async def get_hash_check(the_hash: str, lnurl_id: str) -> dict:
rowid = await db.fetchone(
"SELECT * FROM withdraw.hash_check WHERE id = ?", (the_hash,)
)
rowlnurl = await db.fetchone(
"SELECT * FROM withdraw.hash_check WHERE lnurl_id = ?", (lnurl_id,)
)
if not rowlnurl:
await create_hash_check(the_hash, lnurl_id)
return {"lnurl": True, "hash": False}
else:
if not rowid:
await create_hash_check(the_hash, lnurl_id)
return {"lnurl": True, "hash": False}
else:
return {"lnurl": True, "hash": True}
| 26.24375
| 87
| 0.577757
|
3dfcc877f5e66686e87c813ae22785081d1ab526
| 4,872
|
py
|
Python
|
meerk40t/core/node/elem_line.py
|
joerlane/meerk40t
|
a75d78848ff1682640e112111fb6ac4e23e08616
|
[
"MIT"
] | null | null | null |
meerk40t/core/node/elem_line.py
|
joerlane/meerk40t
|
a75d78848ff1682640e112111fb6ac4e23e08616
|
[
"MIT"
] | null | null | null |
meerk40t/core/node/elem_line.py
|
joerlane/meerk40t
|
a75d78848ff1682640e112111fb6ac4e23e08616
|
[
"MIT"
] | null | null | null |
from copy import copy
from meerk40t.core.node.node import Node, Linecap, Linejoin, Fillrule
from meerk40t.svgelements import Path
class LineNode(Node):
"""
LineNode is the bootstrapped node type for the 'elem line' type.
"""
def __init__(
self,
shape=None,
matrix=None,
fill=None,
stroke=None,
stroke_width=None,
        linecap=None,
        linejoin=None,
        fillrule=None,
**kwargs,
):
super(LineNode, self).__init__(type="elem line", **kwargs)
self.shape = shape
self.settings = kwargs
if matrix is None:
self.matrix = shape.transform
else:
self.matrix = matrix
if fill is None:
self.fill = shape.fill
else:
self.fill = fill
if stroke is None:
self.stroke = shape.stroke
else:
self.stroke = stroke
if stroke_width is None:
self.stroke_width = shape.stroke_width
else:
self.stroke_width = stroke_width
if linecap is None:
self.linecap = Linecap.CAP_BUTT
else:
self.linecap = linecap
if linejoin is None:
self.linejoin = Linejoin.JOIN_MITER
else:
self.linejoin = linejoin
if fillrule is None:
self.fillrule = Fillrule.FILLRULE_NONZERO
else:
self.fillrule = fillrule
self.lock = False
def __copy__(self):
return LineNode(
shape=copy(self.shape),
matrix=copy(self.matrix),
fill=copy(self.fill),
stroke=copy(self.stroke),
stroke_width=self.stroke_width,
linecap=self.linecap,
linejoin=self.linejoin,
fillrule=self.fillrule,
**self.settings,
)
def __repr__(self):
return "%s('%s', %s, %s)" % (
self.__class__.__name__,
self.type,
str(self.shape),
str(self._parent),
)
@property
def bounds(self):
if self._bounds_dirty:
self.shape.transform = self.matrix
self.shape.stroke_width = self.stroke_width
self._bounds = self.shape.bbox(with_stroke=True)
self._bounds_dirty = False
return self._bounds
def preprocess(self, context, matrix, commands):
self.matrix *= matrix
self.shape.transform = self.matrix
self.shape.stroke_width = self.stroke_width
self._bounds_dirty = True
def default_map(self, default_map=None):
default_map = super(LineNode, self).default_map(default_map=default_map)
default_map["element_type"] = "Line"
default_map.update(self.settings)
default_map["stroke"] = self.stroke
default_map["fill"] = self.fill
default_map["stroke-width"] = self.stroke_width
default_map["matrix"] = self.matrix
return default_map
def drop(self, drag_node):
# Dragging element into element.
if drag_node.type.startswith("elem"):
self.insert_sibling(drag_node)
return True
return False
def revalidate_points(self):
bounds = self.bounds
if bounds is None:
return
if len(self._points) < 9:
self._points.extend([None] * (9 - len(self._points)))
self._points[0] = [bounds[0], bounds[1], "bounds top_left"]
self._points[1] = [bounds[2], bounds[1], "bounds top_right"]
self._points[2] = [bounds[0], bounds[3], "bounds bottom_left"]
self._points[3] = [bounds[2], bounds[3], "bounds bottom_right"]
cx = (bounds[0] + bounds[2]) / 2
cy = (bounds[1] + bounds[3]) / 2
self._points[4] = [cx, cy, "bounds center_center"]
self._points[5] = [cx, bounds[1], "bounds top_center"]
self._points[6] = [cx, bounds[3], "bounds bottom_center"]
self._points[7] = [bounds[0], cy, "bounds center_left"]
self._points[8] = [bounds[2], cy, "bounds center_right"]
        obj = self.shape  # the wrapped shape provides point(); no separate .object is set on this node
if hasattr(obj, "point"):
if len(self._points) <= 11:
self._points.extend([None] * (11 - len(self._points)))
start = obj.point(0)
end = obj.point(1)
self._points[9] = [start[0], start[1], "endpoint"]
self._points[10] = [end[0], end[1], "endpoint"]
def update_point(self, index, point):
return False
def add_point(self, point, index=None):
return False
def as_path(self):
self.shape.transform = self.matrix
self.shape.stroke_width = self.stroke_width
self.shape.linecap = self.linecap
self.shape.linejoin = self.linejoin
self.shape.fillrule = self.fillrule
return abs(Path(self.shape))
| 32.697987
| 80
| 0.569992
|
320cea4f657537b91958004f561ae5f843c87c65
| 18,376
|
py
|
Python
|
vnpy/trader/gateway/futuGateway/futuGateway.py
|
tanzedan/vnpy
|
16c616ece1597a5766bf2fb3529f5789958330b6
|
[
"MIT"
] | 4
|
2018-04-05T15:35:02.000Z
|
2022-01-04T11:23:19.000Z
|
vnpy/trader/gateway/futuGateway/futuGateway.py
|
motw2014/vnpy
|
16c616ece1597a5766bf2fb3529f5789958330b6
|
[
"MIT"
] | null | null | null |
vnpy/trader/gateway/futuGateway/futuGateway.py
|
motw2014/vnpy
|
16c616ece1597a5766bf2fb3529f5789958330b6
|
[
"MIT"
] | 1
|
2019-03-17T14:36:08.000Z
|
2019-03-17T14:36:08.000Z
|
# encoding: UTF-8
'''
Gateway adapter for Futu Securities.
'''
import json
from collections import OrderedDict
from threading import Thread
from time import sleep
from datetime import datetime
from copy import copy
import futuquant as ft
from futuquant.open_context import (RET_ERROR, RET_OK,
StockQuoteHandlerBase, OrderBookHandlerBase,
USTradeOrderHandlerBase, USTradeDealHandlerBase,
HKTradeOrderHandlerBase, HKTradeDealHandlerBase)
from vnpy.trader.vtGateway import *
from vnpy.trader.vtConstant import GATEWAYTYPE_INTERNATIONAL
from vnpy.trader.vtFunction import getJsonPath
# Call datetime once to make sure it is initialized
tmp = datetime.strptime('20171123', '%Y%m%d')
# Constant data mappings
productMap = OrderedDict()
productMap[PRODUCT_EQUITY] = 'STOCK'
productMap[PRODUCT_INDEX] = 'IDX'
productMap[PRODUCT_ETF] = 'ETF'
productMap[PRODUCT_WARRANT] = 'WARRANT'
productMap[PRODUCT_BOND] = 'BOND'
directionMap = {}
directionMap[DIRECTION_LONG] = '0'
directionMap[DIRECTION_SHORT] = '1'
directionMapReverse = {v:k for k,v in directionMap.items()}
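# Futu order-status codes mapped onto vn.py order statuses; several distinct
# Futu codes collapse to the same vn.py status.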
statusMapReverse = {}
statusMapReverse['0'] = STATUS_UNKNOWN
statusMapReverse['1'] = STATUS_NOTTRADED
statusMapReverse['2'] = STATUS_PARTTRADED
statusMapReverse['3'] = STATUS_ALLTRADED
statusMapReverse['4'] = STATUS_CANCELLED
statusMapReverse['5'] = STATUS_REJECTED
statusMapReverse['6'] = STATUS_CANCELLED
statusMapReverse['7'] = STATUS_CANCELLED
statusMapReverse['8'] = STATUS_UNKNOWN
statusMapReverse['21'] = STATUS_UNKNOWN
statusMapReverse['22'] = STATUS_UNKNOWN
statusMapReverse['23'] = STATUS_UNKNOWN
########################################################################
class FutuGateway(VtGateway):
    """Futu interface"""
#----------------------------------------------------------------------
def __init__(self, eventEngine, gatewayName='FUTU'):
"""Constructor"""
super(FutuGateway, self).__init__(eventEngine, gatewayName)
self.quoteCtx = None
self.tradeCtx = None
self.host = ''
        self.port = 0
self.market = ''
self.password = ''
        self.env = 1  # defaults to simulated (paper) trading
self.fileName = self.gatewayName + '_connect.json'
self.filePath = getJsonPath(self.fileName, __file__)
self.tickDict = {}
        self.tradeSet = set()  # set of trade IDs already seen, to prevent duplicate pushes
self.qryEnabled = True
self.qryThread = Thread(target=self.qryData)
#----------------------------------------------------------------------
    def writeLog(self, content):
        """Write a log message"""
log = VtLogData()
log.gatewayName = self.gatewayName
log.logContent = content
self.onLog(log)
#----------------------------------------------------------------------
    def writeError(self, code, msg):
        """Write an error message"""
error = VtErrorData()
error.gatewayName = self.gatewayName
error.errorID = code
error.errorMsg = msg
self.onError(error)
#----------------------------------------------------------------------
    def connect(self):
        """Connect"""
        # Load the configuration file
try:
f = open(self.filePath)
setting = json.load(f)
self.host = setting['host']
self.port = setting['port']
self.market = setting['market']
self.password = setting['password']
self.env = setting['env']
except:
self.writeLog(u'Failed to load the configuration file')
return
self.connectQuote()
self.connectTrade()
self.qryThread.start()
#----------------------------------------------------------------------
def qryData(self):
"""初始化时查询数据"""
# 等待2秒保证行情和交易接口启动完成
sleep(2.0)
# Query contracts, trades, orders, positions and the account
self.qryContract()
self.qryTrade()
self.qryOrder()
self.qryPosition()
self.qryAccount()
# Start the periodic queries
self.initQuery()
#----------------------------------------------------------------------
def connectQuote(self):
"""连接行情功能"""
self.quoteCtx = ft.OpenQuoteContext(self.host, self.port)
# Handler subclasses wired back to this gateway
class QuoteHandler(StockQuoteHandlerBase):
"""报价处理器"""
gateway = self # 缓存Gateway对象
def on_recv_rsp(self, rsp_str):
ret_code, content = super(QuoteHandler, self).on_recv_rsp(rsp_str)
if ret_code != RET_OK:
return RET_ERROR, content
self.gateway.processQuote(content)
return RET_OK, content
class OrderBookHandler(OrderBookHandlerBase):
"""订单簿处理器"""
gateway = self
def on_recv_rsp(self, rsp_str):
ret_code, content = super(OrderBookHandler, self).on_recv_rsp(rsp_str)
if ret_code != RET_OK:
return RET_ERROR, content
self.gateway.processOrderBook(content)
return RET_OK, content
# Register the callback handlers
self.quoteCtx.set_handler(QuoteHandler())
self.quoteCtx.set_handler(OrderBookHandler())
# Start the quote context
self.quoteCtx.start()
self.writeLog(u'Quote API connected')
#----------------------------------------------------------------------
def connectTrade(self):
"""连接交易功能"""
# 连接交易接口
if self.market == 'US':
self.tradeCtx = ft.OpenUSTradeContext(self.host, self.port)
OrderHandlerBase = USTradeOrderHandlerBase
DealHandlerBase = USTradeDealHandlerBase
else:
self.tradeCtx = ft.OpenHKTradeContext(self.host, self.port)
OrderHandlerBase = HKTradeOrderHandlerBase
DealHandlerBase = HKTradeDealHandlerBase
# Handler subclasses wired back to this gateway
class OrderHandler(OrderHandlerBase):
"""委托处理器"""
gateway = self # 缓存Gateway对象
def on_recv_rsp(self, rsp_str):
ret_code, content = super(OrderHandler, self).on_recv_rsp(rsp_str)
if ret_code != RET_OK:
return RET_ERROR, content
self.gateway.processOrder(content)
return RET_OK, content
class DealHandler(DealHandlerBase):
"""订单簿处理器"""
gateway = self
def on_recv_rsp(self, rsp_str):
ret_code, content = super(DealHandler, self).on_recv_rsp(rsp_str)
if ret_code != RET_OK:
return RET_ERROR, content
self.gateway.processDeal(content)
return RET_OK, content
# Only live (non-simulated) HK trading requires unlocking
if self.market == 'HK' and self.env == 0:
self.tradeCtx.unlock_trade(self.password)
# Register the callback handlers
self.tradeCtx.set_handler(OrderHandler())
self.tradeCtx.set_handler(DealHandler())
# Start the trade context
self.tradeCtx.start()
# Subscribe to order and deal push updates
self.tradeCtx.subscribe_order_deal_push([],
order_deal_push=True,
envtype=self.env)
self.writeLog(u'Trade API connected')
#----------------------------------------------------------------------
def subscribe(self, subscribeReq):
"""订阅行情"""
for data_type in ['QUOTE', 'ORDER_BOOK']:
code, data = self.quoteCtx.subscribe(subscribeReq.symbol, data_type, True)
if code:
self.writeError(code, u'Market data subscription failed: %s' %data)
#----------------------------------------------------------------------
def sendOrder(self, orderReq):
"""发单"""
side = directionMap[orderReq.direction]
priceType = 0 # only limit orders are supported
code, data = self.tradeCtx.place_order(orderReq.price, orderReq.volume,
orderReq.symbol, side,
priceType, self.env,
order_deal_push=True)
if code:
self.writeError(code, u'Order placement failed: %s' %data)
return ''
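# place_order returns a pandas DataFrame; the first row carries the new order id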
for ix, row in data.iterrows():
orderID = str(row['orderid'])
vtOrderID = '.'.join([self.gatewayName, orderID])
return vtOrderID
#----------------------------------------------------------------------
def cancelOrder(self, cancelOrderReq):
"""撤单"""
code, data = self.tradeCtx.set_order_status(0, int(cancelOrderReq.orderID),
self.env)
if code:
self.writeError(code, u'Cancel failed: %s' %data)
return
#----------------------------------------------------------------------
def qryContract(self):
"""查询合约"""
for vtProductClass, product in productMap.items():
code, data = self.quoteCtx.get_stock_basicinfo(self.market, product)
if code:
self.writeError(code, u'Contract query failed: %s' %data)
return
for ix, row in data.iterrows():
contract = VtContractData()
contract.gatewayName = self.gatewayName
contract.symbol = row['code']
contract.vtSymbol = contract.symbol
contract.name = row['name']
contract.productClass = vtProductClass
contract.size = int(row['lot_size'])
contract.priceTick = 0.01
self.onContract(contract)
self.writeLog(u'Contract data query succeeded')
#----------------------------------------------------------------------
def qryAccount(self):
"""查询账户资金"""
code, data = self.tradeCtx.accinfo_query(self.env)
if code:
self.writeError(code, u'Account funds query failed: %s' %data)
return
for ix, row in data.iterrows():
account = VtAccountData()
account.gatewayName = self.gatewayName
account.accountID = '%s_%s' %(self.gatewayName, self.market)
account.vtAccountID = '.'.join([self.gatewayName, account.accountID])
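# NOTE (assumption): the Futu account fields below are pinyin abbreviations,
# roughly ZCJZ = net asset value, GPBZJ = stock margin, XJJY = available cash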
account.balance = float(row['ZCJZ'])
account.margin = float(row['GPBZJ'])
account.available = float(row['XJJY'])
self.onAccount(account)
#----------------------------------------------------------------------
def qryPosition(self):
"""查询持仓"""
code, data = self.tradeCtx.position_list_query(envtype=self.env)
if code:
self.writeError(code, u'Position query failed: %s' %data)
return
for ix, row in data.iterrows():
pos = VtPositionData()
pos.gatewayName = self.gatewayName
pos.symbol = row['code']
pos.vtSymbol = pos.symbol
pos.direction = DIRECTION_LONG
pos.vtPositionName = '.'.join([pos.vtSymbol, pos.direction])
pos.position = int(row['qty'])
pos.price = float(row['cost_price'])
pos.positionProfit = float(row['pl_val'])
pos.frozen = int(row['qty']) - int(row['can_sell_qty'])
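# Sanitize sentinel values (negative cost price, absurdly large P&L) that the
# API may return for unavailable fields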
if pos.price < 0: pos.price = 0
if pos.positionProfit > 100000000: pos.positionProfit = 0
self.onPosition(pos)
#----------------------------------------------------------------------
def qryOrder(self):
"""查询委托"""
code, data = self.tradeCtx.order_list_query("", envtype=self.env)
if code:
self.writeError(code, u'Order query failed: %s' %data)
return
self.processOrder(data)
self.writeLog(u'Order query succeeded')
#----------------------------------------------------------------------
def qryTrade(self):
"""查询成交"""
code, data = self.tradeCtx.deal_list_query(self.env)
if code:
self.writeError(code, u'Trade query failed: %s' %data)
return
self.processDeal(data)
self.writeLog(u'Trade query succeeded')
#----------------------------------------------------------------------
def close(self):
"""关闭"""
if self.quoteCtx:
self.quoteCtx.close()
if self.tradeCtx:
self.tradeCtx.close()
#----------------------------------------------------------------------
def initQuery(self):
"""初始化连续查询"""
if self.qryEnabled:
# 需要循环的查询函数列表
self.qryFunctionList = [self.qryAccount, self.qryPosition]
self.qryCount = 0 # 查询触发倒计时
self.qryTrigger = 2 # 查询触发点
self.qryNextFunction = 0 # 上次运行的查询函数索引
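# e.g. with qryTrigger=2 a query fires on every third timer event, so each of
# the two functions above runs once every six events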
self.startQuery()
#----------------------------------------------------------------------
def query(self, event):
"""注册到事件处理引擎上的查询函数"""
self.qryCount += 1
if self.qryCount > self.qryTrigger:
# Reset the countdown
self.qryCount = 0
# Run the scheduled query function
function = self.qryFunctionList[self.qryNextFunction]
function()
# Advance to the next query function, wrapping to 0 at the end of the list
self.qryNextFunction += 1
if self.qryNextFunction == len(self.qryFunctionList):
self.qryNextFunction = 0
#----------------------------------------------------------------------
def startQuery(self):
"""启动连续查询"""
self.eventEngine.register(EVENT_TIMER, self.query)
#----------------------------------------------------------------------
def setQryEnabled(self, qryEnabled):
"""设置是否要启动循环查询"""
self.qryEnabled = qryEnabled
#----------------------------------------------------------------------
def processQuote(self, data):
"""报价推送"""
for ix, row in data.iterrows():
symbol = row['code']
tick = self.tickDict.get(symbol, None)
if not tick:
tick = VtTickData()
tick.symbol = symbol
tick.vtSymbol = tick.symbol
tick.gatewayName = self.gatewayName
self.tickDict[symbol] = tick
tick.date = row['data_date'].replace('-', '')
tick.time = row['data_time']
tick.datetime = datetime.strptime(' '.join([tick.date, tick.time]), '%Y%m%d %H:%M:%S')
tick.openPrice = row['open_price']
tick.highPrice = row['high_price']
tick.lowPrice = row['low_price']
tick.preClosePrice = row['prev_close_price']
tick.lastPrice = row['last_price']
tick.volume = row['volume']
newTick = copy(tick)
self.onTick(newTick)
#----------------------------------------------------------------------
def processOrderBook(self, data):
"""订单簿推送"""
symbol = data['stock_code']
tick = self.tickDict.get(symbol, None)
if not tick:
tick = VtTickData()
tick.symbol = symbol
tick.vtSymbol = tick.symbol
tick.gatewayName = self.gatewayName
self.tickDict[symbol] = tick
d = tick.__dict__
for i in range(5):
bidData = data['Bid'][i]
askData = data['Ask'][i]
n = i + 1
d['bidPrice%s' %n] = bidData[0]
d['bidVolume%s' %n] = bidData[1]
d['askPrice%s' %n] = askData[0]
d['askVolume%s' %n] = askData[1]
newTick = copy(tick)
self.onTick(newTick)
#----------------------------------------------------------------------
def processOrder(self, data):
"""处理委托推送"""
for ix, row in data.iterrows():
# Ignore orders whose status is 'deleted'
if str(row['status']) == '7':
continue
order = VtOrderData()
order.gatewayName = self.gatewayName
order.symbol = row['code']
order.vtSymbol = order.symbol
order.orderID = str(row['orderid'])
order.vtOrderID = '.'.join([self.gatewayName, order.orderID])
order.price = float(row['price'])
order.totalVolume = int(row['qty'])
order.tradedVolume = int(row['dealt_qty'])
t = datetime.fromtimestamp(float(row['submited_time']))
order.orderTime = t.strftime('%H:%M:%S')
order.status = statusMapReverse.get(str(row['status']), STATUS_UNKNOWN)
order.direction = directionMapReverse[str(row['order_side'])]
self.onOrder(order)
#----------------------------------------------------------------------
def processDeal(self, data):
"""处理成交推送"""
for ix, row in data.iterrows():
tradeID = row['dealid']
if tradeID in self.tradeSet:
continue
self.tradeSet.add(tradeID)
trade = VtTradeData()
trade.gatewayName = self.gatewayName
trade.symbol = row['code']
trade.vtSymbol = trade.symbol
trade.tradeID = tradeID
trade.vtTradeID = '.'.join([self.gatewayName, trade.tradeID])
trade.orderID = row['orderid']
trade.vtOrderID = '.'.join([self.gatewayName, trade.orderID])
trade.price = float(row['price'])
trade.volume = float(row['qty'])
trade.direction = directionMapReverse[str(row['order_side'])]
t = datetime.fromtimestamp(float(row['time']))
trade.tradeTime = t.strftime('%H:%M:%S')
self.onTrade(trade)
| 34.73724
| 98
| 0.469525
|
0f7e31cf8fba2c7660a89034458ef83d704f3742
| 1,788
|
py
|
Python
|
django_before/json_settings.py
|
alekseyr/django-before
|
f1981ea332a7caf8bdcb9d214a27a3c1555b4f39
|
[
"MIT"
] | 1
|
2016-05-20T09:37:39.000Z
|
2016-05-20T09:37:39.000Z
|
django_before/json_settings.py
|
alekseyr/django-before
|
f1981ea332a7caf8bdcb9d214a27a3c1555b4f39
|
[
"MIT"
] | null | null | null |
django_before/json_settings.py
|
alekseyr/django-before
|
f1981ea332a7caf8bdcb9d214a27a3c1555b4f39
|
[
"MIT"
] | null | null | null |
import json
from .exceptions import DjangoBeforeImproperlyConfigured, DjangoBeforeNotImplemented
def make_json_settings_reader(settings_filename):
reader = _JSONSettingsReader(settings_filename)
return reader
class _JSONSettingsReader(object):
def __init__(self, settings_filename):
self.settings_filename = settings_filename
self.settings = self._load_settings(settings_filename)
def __getitem__(self, setting_name):
try:
return self.settings[setting_name]
except KeyError:
raise DjangoBeforeImproperlyConfigured('Setting with name "%s" was not found in file "%s".' %
(setting_name, self.settings_filename))
def __setitem__(self, *args, **kwargs):
raise DjangoBeforeNotImplemented('JSONSettingsReader does not allow setting values.')
@staticmethod
def _load_settings(settings_filename):
settings = None
try:
file = open(settings_filename)
except IOError:
raise DjangoBeforeImproperlyConfigured('Failed to open settings file "%s".' % settings_filename)
else:
with file:
try:
settings = json.load(file)
except json.JSONDecodeError as e:
raise DjangoBeforeImproperlyConfigured('Failed to parse settings file "%s". JSON error: "%s".' %
(settings_filename, e))
if not isinstance(settings, dict):
raise DjangoBeforeImproperlyConfigured('Settings loaded from file "%s" are not in dict format.' %
settings_filename)
return settings
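# Usage sketch (illustrative; the file name and key below are assumptions):
#   reader = make_json_settings_reader('settings.json')
#   secret = reader['SECRET_KEY']   # a missing key raises DjangoBeforeImproperlyConfigured
#   reader['SECRET_KEY'] = 'x'      # writing always raises DjangoBeforeNotImplemented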
| 38.869565
| 121
| 0.619128
|
82d40660c9bf06ad163b1c6a34e757abdef25915
| 93,963
|
py
|
Python
|
modules/pymol/menu.py
|
kingdavid72/Pymol
|
91ddc53199f40f12d186dee2a3745cd777a57877
|
[
"CNRI-Python"
] | null | null | null |
modules/pymol/menu.py
|
kingdavid72/Pymol
|
91ddc53199f40f12d186dee2a3745cd777a57877
|
[
"CNRI-Python"
] | null | null | null |
modules/pymol/menu.py
|
kingdavid72/Pymol
|
91ddc53199f40f12d186dee2a3745cd777a57877
|
[
"CNRI-Python"
] | null | null | null |
#A* -------------------------------------------------------------------
#B* This file contains source code for the PyMOL computer program
#C* Copyright (c) Schrodinger, LLC.
#D* -------------------------------------------------------------------
#E* It is unlawful to modify or remove this copyright notice.
#F* -------------------------------------------------------------------
#G* Please see the accompanying LICENSE file for further information.
#H* -------------------------------------------------------------------
#I* Additional authors of this source file include:
#-*
#-*
#-*
#Z* -------------------------------------------------------------------
# This module defines the menus and their built-in commands
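# Each menu entry below is a list [code, label, action]:
#   code 2 = non-clickable caption, 1 = selectable item, 0 = separator;
#   'action' is either a command string executed on selection or a nested
#   submenu (another list of entries). (Format inferred from the definitions.)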
def extract(self_cmd, sele):
return [[ 2, 'Extract', '' ],
[ 1, 'object', 'cmd.create(None,"'+sele+'",extract="'+sele+'",zoom=0)' ],
[ 1, 'extend 1', 'cmd.create(None,"('+sele+') extend 1",extract="'+sele+'",zoom=0)' ],
[ 1, 'byres extend 1', 'cmd.create(None,"byres (('+sele+') extend 1)",extract="'+sele+'",zoom=0)' ],
]
def camera_store_with_scene(self_cmd,frame):
list = self_cmd.get_scene_list()[0:40] # keep this practical
result = [[ 2, 'Scene:', '']]
for a in list:
result.append([1,a,'cmd.mview("store",scene="'+a+'",first=%s)'%frame])
return result
def store_with_state(self_cmd,obj='',frame=0):
list = self_cmd.get_scene_list()[0:40] # keep this practical
n_state = self_cmd.count_states()
result = [[ 2, 'State:', ''],
[ 1, 'current','cmd.mview("store",object="'+obj+'",state=-1,first=%s)'%(frame)],
[ 0, '' ,'' ],
[ 1, '1', 'cmd.mview("store",object="'+obj+'",state=1,first=%s)'%(frame) ],
]
if (n_state>1):
result.extend([
[ 1, str(n_state),'cmd.mview("store",object="'+obj+'",state=%d,first=%s)'%(n_state,frame)],
[ 0, '' ,'' ],
[ 1, str(1+n_state/4), 'cmd.mview("store",object="'+obj+'",state=%d,first=%s)'%(1+n_state/4,frame) ],
[ 1, str(1+n_state/3), 'cmd.mview("store",object="'+obj+'",state=%d,first=%s)'%(1+n_state/3,frame) ],
[ 1, str(n_state/2), 'cmd.mview("store",object="'+obj+'",state=%d,first=%s)'%(n_state/2,frame) ],
[ 1, str(1+n_state/2), 'cmd.mview("store",object="'+obj+'",state=%d,first=%s)'%(1+n_state/2,frame) ],
[ 1, str((2*n_state)/3), 'cmd.mview("store",object="'+obj+'",state=%d,first=%s)'%((2*n_state)/3,frame) ],
[ 1, str((3*n_state)/4), 'cmd.mview("store",object="'+obj+'",state=%d,first=%s)'%((3*n_state)/4,frame) ],
])
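# NOTE: n_state/4 etc. rely on Python 2 integer division; Python 3 would
# need n_state//4 to keep these state indices integral.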
return result
def mouse_config(self_cmd):
result = [[ 1, '3-Button Motions',
'cmd.config_mouse("three_button_motions")' ],
[ 1, '3-Button Editing',
'cmd.config_mouse("three_button_editing")' ],
[ 1, '3-Button Viewing',
'cmd.mouse("three_button_viewing")' ],
[ 1, '3-Button Lights',
'cmd.mouse("three_button_lights")' ],
[ 1, '3-Button All Modes',
'cmd.config_mouse("three_button_all_modes")' ],
[ 0, '', ''],
[ 1, '2-Button Editing',
'cmd.config_mouse("two_button_editing")' ],
[ 1, '2-Button Viewing',
'cmd.config_mouse("two_button_viewing")' ],
[ 1, '2-Button Lights',
'cmd.mouse("two_button_lights")' ],
]
return result
def smooth(self_cmd,extra=''):
return [[ 1, 'a little' , 'cmd.mview("smooth"%s)'%extra ],
[ 1, 'more' , 'cmd.mview("smooth",window=15%s)'%extra ],
[ 1, 'a lot' , 'cmd.mview("smooth",window=30%s)'%extra ]]
def camera_motion(self_cmd, frame="0"):
return [[ 2, 'Camera Motion:' , '' ],
[ 1, 'store' , 'cmd.mview("store",first='+frame+')' ],
[ 1, 'store with scene' , camera_store_with_scene(self_cmd,frame) ],
[ 1, 'store with state' , store_with_state(self_cmd,'',frame) ],
[ 1, 'clear' , 'cmd.mview("clear",first='+frame+')' ],
[ 0, '' ,'' ],
[ 1, 'reset camera motions' , 'cmd.mview("reset")' ],
[ 0, '' ,'' ],
[ 1, 'purge entire movie' , 'cmd.mset()' ],
[ 0, '' ,'' ],
[ 1, 'smooth key frames' , smooth(self_cmd) ],
[ 0, '' ,'' ],
[ 1, 'interpolate' , 'cmd.mview("interpolate")' ],
[ 1, 'reinterpolate' , 'cmd.mview("reinterpolate")' ],
[ 1, 'uninterpolate' , 'cmd.mview("uninterpolate")' ],
]
def obj_motion(self_cmd, obj, frame="0"):
return [[ 2, 'Object "'+obj+'" Motion:' , '' ],
[ 1, 'drag' , 'cmd.drag("'+obj+'")' ],
[ 0, '' ,'' ],
[ 1, 'store' , 'cmd.mview("store",object="'+obj+'",first='+frame+')' ],
[ 1, 'store with state' , store_with_state(self_cmd,obj,frame) ],
[ 1, 'reset' , ';cmd.reset(object="'+obj+'");' ],
[ 1, 'clear' , 'cmd.mview("clear",object="'+obj+'",first='+frame+')' ],
[ 0, '' ,'' ],
[ 1, 'reset object motions' , 'cmd.mview("reset",object="'+obj+'")' ],
[ 1, 'purge object motions' , 'cmd.mview("purge",object="'+obj+'")' ],
[ 0, '' ,'' ],
[ 1, 'smooth key frames' , smooth(self_cmd,',object="'+obj+'"') ],
[ 0, '' ,'' ],
[ 1, 'interpolate' , 'cmd.mview("interpolate",object="'+obj+'")' ],
[ 1, 'reinterpolate' , 'cmd.mview("reinterpolate",object="'+obj+'")' ],
[ 1, 'uninterpolate' , 'cmd.mview("uninterpolate",object="'+obj+'")' ],
]
def rep_action(self_cmd, sele, action) :
return [
[ 1, 'lines' , 'cmd.'+action+'("lines" ,"'+sele+'")' ],
[ 1, 'sticks' , 'cmd.'+action+'("sticks" ,"'+sele+'")' ],
[ 1, 'ribbon' , 'cmd.'+action+'("ribbon" ,"'+sele+'")' ],
[ 1, 'cartoon' , 'cmd.'+action+'("cartoon" ,"'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'label' , 'cmd.'+action+'("labels" ,"'+sele+'")' ],
[ 1, 'cell' , 'cmd.'+action+'("cell" ,"'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'nonbonded' , 'cmd.'+action+'("nonbonded" ,"'+sele+'")' ],
[ 1, 'dots' , 'cmd.'+action+'("dots" ,"'+sele+'")' ],
[ 1, 'spheres' , 'cmd.'+action+'("spheres" ,"'+sele+'")' ],
[ 1, 'nb_spheres' , 'cmd.'+action+'("nb_spheres","'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'mesh' , 'cmd.'+action+'("mesh" ,"'+sele+'")' ],
[ 1, 'surface' , 'cmd.'+action+'("surface" ,"'+sele+'")' ],
]
def mol_as(self_cmd, sele):
return (
[[ 2, 'As:' , '']]
+rep_action(self_cmd, sele,'show_as')
)
def mol_toggle(self_cmd, sele):
return (
[[ 2, 'Toggle:' , '']]
+rep_action(self_cmd, sele,'toggle')
)
def show_misc(self_cmd, sele):
return [[ 2, 'Show:', '' ],
[ 1, 'lines', 'cmd.show("lines","'+sele+'")'],
[ 1, 'sticks', 'cmd.show("sticks","'+sele+'")'],
[ 1, 'spheres', 'cmd.show("spheres","'+sele+'")'],
]
def mol_show(self_cmd, sele):
return (
[[ 2, 'Show:' , '' ],
[ 1, 'as' , mol_as(self_cmd, sele) ],
[ 0, '', '']]
+ rep_action(self_cmd, sele,'show') +
[[ 0, '', ''],
[ 1, 'organic' , show_misc(self_cmd, '(organic and ('+sele+'))') ],
[ 1, 'main chain' , show_misc(self_cmd, "((byres ("+sele+"))&n;ca,c,n,o,h)") ],
[ 1, 'side chain' , show_misc(self_cmd, "((byres ("+sele+"))&(!(n;c,o,h|(n. n&!r. pro))))") ],
[ 1, 'disulfides' , show_misc(self_cmd, "(byres ((("+sele+
") & r. CYS+CYX & n. SG) & bound_to (("+sele+") & r. CYS+CYX & n. SG))) & n. CA+CB+SG") ]
] +
[[ 0, '', ''],
[ 1, 'valence', 'cmd.set_bond("valence", "1", "'+sele+'",quiet=1)'],
] )
# (disabled stray leftover: this module-level call references undefined names
# and would raise a NameError at import; the same command already appears in
# the 'disulfides' menu entry above)
# self_cmd.show("lines","(byres ((" + sele + " & r. CYS+CYX & n. SG) & bound_to ("
#               + sele + " & r. CYS+CYX & n. SG))) & n. CA+CB+SG")
def hide_hydro(self_cmd, sele):
return ( [[ 2, 'Hide:' , '' ],
[ 1, 'all' , 'cmd.hide("('+sele+' and hydro)")' ],
[ 1, 'nonpolar' , 'cmd.hide("('+sele+' and hydro and (elem c extend 1))")' ],
] )
def mol_hide(self_cmd, sele):
return (
[[ 2, 'Hide:' , '' ],
[ 1, 'everything', 'cmd.hide("everything","'+sele+'")' ],
[ 0, '' , '' ]]
+ rep_action(self_cmd, sele,'hide') +
[[ 0, '' , '' ],
[ 1, 'main chain', 'cmd.hide("((byres ('+sele+'))&(n. c,o,h|(n. n&!r. pro)))")' ],
[ 1, 'side chain', 'cmd.hide("((byres ('+sele+'))&!(n. ca,c,o,h|(n. n&!r. pro)))")' ],
[ 1, 'waters' , 'cmd.hide("(solvent and ('+sele+'))")' ],
[ 0, '' , '' ],
[ 1, 'hydrogens' , hide_hydro(self_cmd, sele) ],
[ 0, '' , '' ],
[ 1, 'unselected', 'cmd.hide("(not '+sele+')")' ],
]
+ [[ 0, '', ''],
[ 1, 'valence', 'cmd.set_bond("valence", "0", "'+sele+'",quiet=1)'],
] )
def measurement_show(self_cmd, sele):
return [[ 2, 'Show:' , '' ],
[ 1, 'dashes' , 'cmd.show("dashes" ,"'+sele+'")' ],
[ 1, 'angles' , 'cmd.show("angles" ,"'+sele+'")' ],
[ 1, 'dihedrals' , 'cmd.show("dihedrals" ,"'+sele+'")' ],
[ 1, 'labels' , 'cmd.show("labels" ,"'+sele+'")' ]
]
def measurement_hide(self_cmd, sele):
return [[ 2, 'Hide:' , '' ],
[ 1, 'dashes' , 'cmd.hide("dashes" ,"'+sele+'")' ],
[ 1, 'angles' , 'cmd.hide("angles" ,"'+sele+'")' ],
[ 1, 'dihedrals' , 'cmd.hide("dihedrals" ,"'+sele+'")' ],
[ 1, 'labels' , 'cmd.hide("labels" ,"'+sele+'")' ]
]
def cgo_show(self_cmd, sele):
return [[ 2, 'Show:' , '' ],
[ 1, 'cgo' , 'cmd.show("cgo" ,"'+sele+'")' ],
]
def cgo_hide(self_cmd, sele):
return [[ 2, 'Hide:' , '' ],
[ 1, 'cgo' , 'cmd.hide("cgo" ,"'+sele+'")' ],
]
def simple_show(self_cmd, sele):
return [[ 2, 'Show:' , '' ],
[ 1, 'everything' , 'cmd.show("everything","'+sele+'")' ]]
def simple_hide(self_cmd, sele):
return [[ 2, 'Hide:' ,'' ],
[ 1, 'everything' ,'cmd.hide("everything","'+sele+'")' ]]
def map_show(self_cmd, sele):
return [[ 2, 'Show:' , '' ],
[ 1, 'dots' , 'cmd.show("dots","'+sele+'")' ],
[ 1, 'extent' , 'cmd.show("extent","'+sele+'")' ],
[ 1, 'everything' , 'cmd.show("everything","'+sele+'")' ]]
def map_hide(self_cmd, sele):
return [[ 2, 'Hide:' ,'' ],
[ 1, 'dots' , 'cmd.hide("dots","'+sele+'")' ],
[ 1, 'extent' , 'cmd.hide("extent","'+sele+'")' ],
[ 1, 'everything' ,'cmd.hide("everything","'+sele+'")' ]]
def mesh_show(self_cmd, sele):
return [[ 2, 'Show:' , '' ],
[ 1, 'mesh' , 'cmd.show("mesh","'+sele+'")' ],
[ 1, 'cell' , 'cmd.show("cell","'+sele+'")' ],
[ 1, 'everything' , 'cmd.show("everything","'+sele+'")' ]]
def mesh_hide(self_cmd, sele):
return [[ 2, 'Hide:' , '' ],
[ 1, 'mesh' , 'cmd.hide("mesh","'+sele+'")' ],
[ 1, 'cell' , 'cmd.hide("cell","'+sele+'")' ],
[ 1, 'everything' , 'cmd.hide("everything","'+sele+'")' ]]
def surface_show(self_cmd, sele):
return [[ 2, 'Show:' , '' ],
[ 1, 'surface' , 'cmd.show("surface","'+sele+'")' ],
[ 1, 'cell' , 'cmd.show("cell","'+sele+'")' ],
[ 1, 'everything' , 'cmd.show("everything","'+sele+'")' ]]
def surface_hide(self_cmd, sele):
return [[ 2, 'Hide:' , '' ],
[ 1, 'surface' , 'cmd.hide("surface","'+sele+'")' ],
[ 1, 'cell' , 'cmd.hide("cell","'+sele+'")' ],
[ 1, 'everything' , 'cmd.hide("everything","'+sele+'")' ]]
def slice_show(self_cmd, sele):
return [[ 2, 'Show:' , '' ],
[ 1, 'slice' , 'cmd.show("slice","'+sele+'")' ],
]
def slice_hide(self_cmd, sele):
return [[ 2, 'Hide:' , '' ],
[ 1, 'slice' , 'cmd.hide("slice","'+sele+'")' ],
]
def volume_show(self_cmd, sele):
return [[ 2, 'Show:' , '' ],
[ 1, 'volume' , 'cmd.show("volume","'+sele+'")' ],
[ 1, 'extent' , 'cmd.show("extent","'+sele+'")' ],
]
def volume_hide(self_cmd, sele):
return [[ 2, 'Hide:' , '' ],
[ 1, 'volume' , 'cmd.hide("volume","'+sele+'")' ],
[ 1, 'extent' , 'cmd.hide("extent","'+sele+'")' ],
]
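# The '\\RGB' prefixes in the labels below appear to be PyMOL menu color
# escapes: three digits 0-9 scaling the red, green and blue channels of the
# following label text (e.g. '\\900' renders the text red).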
def by_elem2(self_cmd, sele):
return [
[ 2, 'Atoms' ,'' ],
[1,'\\494C\\777H\\229N\\922O\\950S...','util.cba(10,"'+sele+'",_self=cmd)'],# lime
[1,'\\155C\\777H\\229N\\922O\\950S...','util.cba(5262,"'+sele+'",_self=cmd)'],# deepteal
[1,'\\904C\\777H\\229N\\922O\\950S...','util.cba(12,"'+sele+'",_self=cmd)'],# hotpink
[1,'\\983C\\777H\\229N\\922O\\950S...','util.cba(36,"'+sele+'",_self=cmd)'],# yelloworange
[1,'\\525C\\777H\\229N\\922O\\950S...','util.cba(5271,"'+sele+'",_self=cmd)'],# violetpurple
[1,'\\666C\\777H\\229N\\922O\\950S...','util.cba(124,"'+sele+'",_self=cmd)'],# grey70
[1,'\\049C\\777H\\229N\\922O\\950S...','util.cba(17,"'+sele+'",_self=cmd)'],# marine
[1,'\\760C\\777H\\229N\\922O\\950S...','util.cba(18,"'+sele+'",_self=cmd)'],# olive
]
def by_elem3(self_cmd, sele):
return [
[ 2, 'Atoms' ,'' ],
[1,'\\564C\\777H\\229N\\922O\\950S...','util.cba(5270,"'+sele+'",_self=cmd)'],# smudge
[1,'\\077C\\777H\\229N\\922O\\950S...','util.cba(20,"'+sele+'",_self=cmd)'],# teal
[1,'\\644C\\777H\\229N\\922O\\950S...','util.cba(5272,"'+sele+'",_self=cmd)'],# dirtyviolet
[1,'\\976C\\777H\\229N\\922O\\950S...','util.cba(52,"'+sele+'",_self=cmd)'],# wheat
[1,'\\944C\\777H\\229N\\922O\\950S...','util.cba(5258,"'+sele+'",_self=cmd)'],# deepsalmon
[1,'\\978C\\777H\\229N\\922O\\950S...','util.cba(5274,"'+sele+'",_self=cmd)'],# lightpink
[1,'\\499C\\777H\\229N\\922O\\950S...','util.cba(5257,"'+sele+'",_self=cmd)'],# aquamarine
[1,'\\994C\\777H\\229N\\922O\\950S...','util.cba(5256,"'+sele+'",_self=cmd)'],# paleyellow
]
def by_elem4(self_cmd, sele):
return [
[ 2, 'Atoms' ,'' ],
[1,'\\094C\\777H\\229N\\922O\\950S...','util.cba(15,"'+sele+'",_self=cmd)'],# limegreen
[1,'\\247C\\777H\\229N\\922O\\950S...','util.cba(5277,"'+sele+'",_self=cmd)'],# skyblue
[1,'\\824C\\777H\\229N\\922O\\950S...','util.cba(5279,"'+sele+'",_self=cmd)'],# warmpink
[1,'\\792C\\777H\\229N\\922O\\950S...','util.cba(5276,"'+sele+'",_self=cmd)'],# limon
[1,'\\949C\\777H\\229N\\922O\\950S...','util.cba(53,"'+sele+'",_self=cmd)'],# violet
[1,'\\889C\\777H\\229N\\922O\\950S...','util.cba(5278,"'+sele+'",_self=cmd)'],# bluewhite
[1,'\\297C\\777H\\229N\\922O\\950S...','util.cba(5275,"'+sele+'",_self=cmd)'],# greencyan
[1,'\\653C\\777H\\229N\\922O\\950S...','util.cba(5269,"'+sele+'",_self=cmd)'],# sand
]
def by_elem5(self_cmd, sele):
return [
[ 2, 'Atoms' ,'' ],
[1,'\\252C\\777H\\229N\\922O\\950S...','util.cba(22,"'+sele+'",_self=cmd)'],# forest
[1,'\\466C\\777H\\229N\\922O\\950S...','util.cba(5266,"'+sele+'",_self=cmd)'],# lightteal
[1,'\\755C\\777H\\229N\\922O\\950S...','util.cba(5280,"'+sele+'",_self=cmd)'],# darksalmon
[1,'\\570C\\777H\\229N\\922O\\950S...','util.cba(5267,"'+sele+'",_self=cmd)'],# splitpea
[1,'\\634C\\777H\\229N\\922O\\950S...','util.cba(5268,"'+sele+'",_self=cmd)'],# raspberry
[1,'\\555C\\777H\\229N\\922O\\950S...','util.cba(104,"'+sele+'",_self=cmd)'],# grey50
[1,'\\226C\\777H\\229N\\922O\\950S...','util.cba(23,"'+sele+'",_self=cmd)'],# deepblue
[1,'\\632C\\777H\\229N\\922O\\950S...','util.cba(51,"'+sele+'",_self=cmd)'],# brown
]
def by_elem6(self_cmd, sele):
return [
[ 2, 'Atoms' ,'' ],
[1,'\\191C\\911H\\229N\\922O\\950S...','util.cbh("tv_red","'+sele+'",_self=cmd)'],# tv_red
[1,'\\191C\\917H\\229N\\922O\\950S...','util.cbh("lightmagenta","'+sele+'",_self=cmd)'],# lightmagenta
[1,'\\191C\\119H\\229N\\922O\\950S...','util.cbh("tv_blue","'+sele+'",_self=cmd)'],# tv_blue
[1,'\\191C\\940H\\229N\\922O\\950S...','util.cbh("orange","'+sele+'",_self=cmd)'],# orange
[1,'\\191C\\870H\\229N\\922O\\950S...','util.cbh("olive","'+sele+'",_self=cmd)'],# olive
[1,'\\191C\\088H\\229N\\922O\\950S...','util.cbh("teal","'+sele+'",_self=cmd)'],# teal
[1,'\\191C\\521H\\229N\\922O\\950S...','util.cbh("chocolate","'+sele+'",_self=cmd)'],# chocolate
[1,'\\191C\\000H\\229N\\922O\\950S...','util.cbh("black","'+sele+'",_self=cmd)'],# black
]
def by_elem(self_cmd, sele):
return [
[ 2, 'Atoms' ,'' ],
[1,' \\777H\\229N\\922O\\950S...','util.cnc("'+sele+'",_self=cmd)'],
[1,'\\292C\\777H\\229N\\922O\\950S...','util.cba(33,"'+sele+'",_self=cmd)'],# tv_green
[1,'\\099C\\777H\\229N\\922O\\950S...','util.cba(5,"'+sele+'",_self=cmd)'],# cyan
[1,'\\927C\\777H\\229N\\922O\\950S...','util.cba(154,"'+sele+'",_self=cmd)'],# lightmagenta
[1,'\\990C\\777H\\229N\\922O\\950S...','util.cba(6,"'+sele+'",_self=cmd)'],# yellow
[1,'\\955C\\777H\\229N\\922O\\950S...','util.cba(9,"'+sele+'",_self=cmd)'],# salmon
[1,'\\888C\\777H\\229N\\922O\\950S...','util.cba(144,"'+sele+'",_self=cmd)'],# grey90
[1,'\\449C\\777H\\229N\\922O\\950S...','util.cba(11,"'+sele+'",_self=cmd)'],# slate
[1,'\\962C\\777H\\229N\\922O\\950S...','util.cba(13,"'+sele+'",_self=cmd)'],# orange
[ 1, 'set 2' ,by_elem2(self_cmd, sele) ],
[ 1, 'set 3' ,by_elem3(self_cmd, sele) ],
[ 1, 'set 4' ,by_elem4(self_cmd, sele) ],
[ 1, 'set 5' ,by_elem5(self_cmd, sele) ],
[ 1, 'set 6/H' ,by_elem6(self_cmd, sele) ],
]
def by_ss(self_cmd, sele):
return [
[ 2, 'By Secondary Structure:' ,'' ],
[ 1, '\\900Helix \\990Sheet \\090Loop' , 'util.cbss("'+sele+'","red","yellow","green",_self=cmd)'],
[ 1, '\\099Helix \\909Sheet \\955Loop' , 'util.cbss("'+sele+'","cyan","magenta","salmon",_self=cmd)'],
[ 1, '\\099Helix \\900Sheet \\909Loop' , 'util.cbss("'+sele+'","cyan","red","magenta",_self=cmd)'],
]
def spectrum(self_cmd, sele):
return [
[ 2, 'Spectrum:' ,'' ],
[ 1, '\\900r\\950a\\990i\\090n\\099b\\059o\\009w\\888(e. c)',
'cmd.spectrum("count",selection="('+sele+')&e. c")'],
[ 1, '\\900r\\950a\\990i\\090n\\099b\\059o\\009w\\888(*/ca)',
'cmd.spectrum("count",selection="('+sele+')&*/ca")'],
[ 1, '\\900r\\950a\\990i\\090n\\099b\\059o\\009w',
'cmd.spectrum("count",selection="'+sele+'",byres=1)'],
[ 0, '' , '' ],
[ 1, 'b-factors' , 'cmd.spectrum("b",selection=("'+sele+'"),quiet=0)' ],
[ 1, 'b-factors(*/ca)' , 'cmd.spectrum("b",selection="(('+sele+')&*/ca)",quiet=0)' ],
[ 0, '' , '' ],
[ 1, 'area (molecular)', 'util.color_by_area(("'+sele+'"),"molecular")' ],
[ 1, 'area (solvent)' , 'util.color_by_area(("'+sele+'"),"solvent")' ],
]
def by_chain(self_cmd, sele):
return [
[ 2, 'By Chain:' ,'' ],
[ 1, '\\900b\\950y \\090c\\099h\\059a\\009i\\705n\\888(e. c)',
'util.color_chains("('+sele+' and elem c)",_self=cmd)'],
[ 1, '\\900b\\950y \\090c\\099h\\059a\\009i\\705n\\888(*/ca)',
'util.color_chains("('+sele+' and name ca)",_self=cmd)'],
[ 1, '\\900b\\950y \\090c\\099h\\059a\\009i\\705n',
'util.color_chains("('+sele+')",_self=cmd)'],
[ 0, '' , '' ],
[ 1, '\\900c\\950h\\990a\\090i\\099n\\059b\\009o\\705w\\888s',
'util.chainbow("('+sele+')",_self=cmd)'],
]
def reds(self_cmd, sele):
return [
[ 2, 'Reds' ,'' ],
[1,'\\900red','cmd.color(4,"'+sele+'")'],
[1,'\\922tv_red','cmd.color(32,"'+sele+'")'],
[1,'\\634raspberry','cmd.color(5268,"'+sele+'")'],
[1,'\\755darksalmon','cmd.color(5280,"'+sele+'")'],
[1,'\\955salmon','cmd.color(9,"'+sele+'")'],
[1,'\\944deepsalmon','cmd.color(5258,"'+sele+'")'],
[1,'\\824warmpink','cmd.color(5279,"'+sele+'")'],
[1,'\\611firebrick','cmd.color(49,"'+sele+'")'],
[1,'\\522ruby','cmd.color(21,"'+sele+'")'],
[1,'\\521chocolate','cmd.color(50,"'+sele+'")'],
[1,'\\632brown','cmd.color(51,"'+sele+'")'],
]
def greens(self_cmd, sele):
return [
[ 2, 'Greens' ,'' ],
[1,'\\090green','cmd.color(3,"'+sele+'")'],
[1,'\\292tv_green','cmd.color(33,"'+sele+'")'],
[1,'\\490chartreuse','cmd.color(14,"'+sele+'")'],
[1,'\\570splitpea','cmd.color(5267,"'+sele+'")'],
[1,'\\564smudge','cmd.color(5270,"'+sele+'")'],
[1,'\\686palegreen','cmd.color(5259,"'+sele+'")'],
[1,'\\094limegreen','cmd.color(15,"'+sele+'")'],
[1,'\\494lime','cmd.color(10,"'+sele+'")'],
[1,'\\792limon','cmd.color(5276,"'+sele+'")'],
[1,'\\252forest','cmd.color(22,"'+sele+'")'],
]
def blues(self_cmd, sele):
return [
[ 2, 'Blues' ,'' ],
[1,'\\009blue','cmd.color(2,"'+sele+'")'],
[1,'\\339tv_blue','cmd.color(34,"'+sele+'")'],
[1,'\\049marine','cmd.color(17,"'+sele+'")'],
[1,'\\449slate','cmd.color(11,"'+sele+'")'],
[1,'\\779lightblue','cmd.color(5263,"'+sele+'")'],
[1,'\\247skyblue','cmd.color(5277,"'+sele+'")'],
[1,'\\409purpleblue','cmd.color(16,"'+sele+'")'],
[1,'\\226deepblue','cmd.color(23,"'+sele+'")'],
[1,'\\115density','cmd.color(4155,"'+sele+'")'],
]
def yellows(self_cmd, sele):
return [
[ 2, 'Yellows' ,'' ],
[1,'\\990yellow','cmd.color(6,"'+sele+'")'],
[1,'\\992tv_yellow','cmd.color(35,"'+sele+'")'],
[1,'\\994paleyellow','cmd.color(5256,"'+sele+'")'],
[1,'\\983yelloworange','cmd.color(36,"'+sele+'")'],
[1,'\\792limon','cmd.color(5276,"'+sele+'")'],
[1,'\\976wheat','cmd.color(52,"'+sele+'")'],
[1,'\\653sand','cmd.color(5269,"'+sele+'")'],
]
def magentas(self_cmd, sele):
return [
[ 2, 'Magentas' ,'' ],
[1,'\\909magenta','cmd.color(8,"'+sele+'")'],
[1,'\\927lightmagenta','cmd.color(154,"'+sele+'")'],
[1,'\\904hotpink','cmd.color(12,"'+sele+'")'],
[1,'\\968pink','cmd.color(48,"'+sele+'")'],
[1,'\\978lightpink','cmd.color(5274,"'+sele+'")'],
[1,'\\644dirtyviolet','cmd.color(5272,"'+sele+'")'],
[1,'\\949violet','cmd.color(53,"'+sele+'")'],
[1,'\\525violetpurple','cmd.color(5271,"'+sele+'")'],
[1,'\\707purple','cmd.color(19,"'+sele+'")'],
[1,'\\515deeppurple','cmd.color(5261,"'+sele+'")'],
]
def cyans(self_cmd, sele):
return [
[ 2, 'Cyans' ,'' ],
[1,'\\099cyan','cmd.color(5,"'+sele+'")'],
[1,'\\799palecyan','cmd.color(5265,"'+sele+'")'],
[1,'\\499aquamarine','cmd.color(5257,"'+sele+'")'],
[1,'\\297greencyan','cmd.color(5275,"'+sele+'")'],
[1,'\\077teal','cmd.color(20,"'+sele+'")'],
[1,'\\155deepteal','cmd.color(5262,"'+sele+'")'],
[1,'\\466lightteal','cmd.color(5266,"'+sele+'")'],
]
def oranges(self_cmd, sele):
return [
[ 2, 'Oranges' ,'' ],
[1,'\\950orange','cmd.color(13,"'+sele+'")'],
[1,'\\951tv_orange','cmd.color(37,"'+sele+'")'],
[1,'\\962brightorange','cmd.color(30,"'+sele+'")'],
[1,'\\985lightorange','cmd.color(5264,"'+sele+'")'],
[1,'\\983yelloworange','cmd.color(36,"'+sele+'")'],
[1,'\\760olive','cmd.color(18,"'+sele+'")'],
[1,'\\551deepolive','cmd.color(5260,"'+sele+'")'],
]
def tints(self_cmd, sele):
return [
[ 2, 'Tints' ,'' ],
[1,'\\976wheat','cmd.color(52,"'+sele+'")'],
[1,'\\686palegreen','cmd.color(5259,"'+sele+'")'],
[1,'\\779lightblue','cmd.color(5263,"'+sele+'")'],
[1,'\\994paleyellow','cmd.color(5256,"'+sele+'")'],
[1,'\\978lightpink','cmd.color(5274,"'+sele+'")'],
[1,'\\799palecyan','cmd.color(5265,"'+sele+'")'],
[1,'\\985lightorange','cmd.color(5264,"'+sele+'")'],
[1,'\\889bluewhite','cmd.color(5278,"'+sele+'")'],
]
def grays(self_cmd, sele):
return [
[ 2, 'Grays' ,'' ],
[ 1, '\\999white ', 'cmd.color("white","'+sele+'")' ],
[ 1, '\\999gray90 ', 'cmd.color("grey90","'+sele+'")' ],
[ 1, '\\888gray80 ', 'cmd.color("grey80","'+sele+'")' ],
[ 1, '\\777gray70 ', 'cmd.color("grey70","'+sele+'")' ],
[ 1, '\\666gray60 ', 'cmd.color("grey60","'+sele+'")' ],
[ 1, '\\555gray50 ', 'cmd.color("grey50","'+sele+'")' ],
[ 1, '\\444gray40 ', 'cmd.color("grey40","'+sele+'")' ],
[ 1, '\\333gray30 ', 'cmd.color("grey30","'+sele+'")' ],
[ 1, '\\222gray20 ', 'cmd.color("grey20","'+sele+'")' ],
[ 1, '\\222gray10 ', 'cmd.color("grey10","'+sele+'")' ],
[ 1, '\\222black ', 'cmd.color("black","'+sele+'")' ],
]
def all_colors(self_cmd, sele):
return [
[ 1, '\\900reds' ,reds(self_cmd, sele) ],
[ 1, '\\090greens' ,greens(self_cmd, sele) ],
[ 1, '\\009blues' ,blues(self_cmd, sele) ],
[ 1, '\\990yellows' ,yellows(self_cmd, sele) ],
[ 1, '\\909magentas' , magentas(self_cmd, sele) ],
[ 1, '\\099cyans' , cyans(self_cmd, sele) ],
[ 1, '\\950oranges' , oranges(self_cmd, sele) ],
[ 1, '\\978tints' ,tints(self_cmd, sele) ],
[ 1, '\\666grays' ,grays(self_cmd, sele) ],
# [ 0, '', ''],
# [ 1, '\\900red' ,'cmd.color("red","'+sele+'")' ],
# [ 1, '\\090green' ,'cmd.color("green","'+sele+'")' ],
# [ 1, '\\009blue' ,'cmd.color("blue","'+sele+'")' ],
# [ 1, '\\990yellow' ,'cmd.color("yellow","'+sele+'")' ],
# [ 1, '\\909magenta' ,'cmd.color("magenta","'+sele+'")' ],
# [ 1, '\\099cyan' ,'cmd.color("cyan","'+sele+'")' ],
# [ 1, '\\955salmon' ,'cmd.color("salmon","'+sele+'")' ],
# [1, '\\940orange','cmd.color(13,"'+sele+'")'],
#
# [ 1, '\\555gray' ,'cmd.color("gray","'+sele+'")' ],
# [ 1, '\\999white' ,'cmd.color("white","'+sele+'")' ],
]
#def vol_color(self_cmd, sele):
# print "Be sure to finish the color_volume colorRamps"
# return [
# [2, 'Colors:', ''],
# [1, 'High Focus', 'cmd.volume_color("'+sele+'","-1")' ],
# [1, 'Med. Focus', 'cmd.volume_color("'+sele+'","-2")' ],
# [1, 'Low Focus', 'cmd.volume_color("'+sele+'","-3")' ],
# [1, 'Solvent Focus', 'cmd.volume_color("'+sele+'","-4")' ],
# [1, 'Ion Focus', 'cmd.volume_color("'+sele+'","-5")' ],
# ]
def color_auto(self_cmd, sele):
return [
[ 2, 'Auto' ,'' ],
[ 1, 'elem c', 'cmd.color("auto","('+sele+') and elem c")' ],
[ 0, '' , '' ],
[ 1, 'all','cmd.color("auto","'+sele+'")' ],
[ 0, '' , '' ],
[ 1, '\\900b\\950y \\090o\\099b\\059j\\999(e. c)',
'util.color_objs("('+sele+' and elem c)",_self=cmd)'],
[ 1, '\\900b\\950y \\090o\\099b\\059j',
'util.color_objs("('+sele+')",_self=cmd)'],
]
def mol_color(self_cmd, sele):
return (
[[ 2, 'Color:' ,'' ],
[ 1, 'by element' , by_elem(self_cmd, sele) ],
[ 1, 'by chain' , by_chain(self_cmd, sele) ],
[ 1, 'by ss ' , by_ss(self_cmd, sele) ],
[ 1, '\\900s\\950p\\990e\\090c\\099t\\059r\\009u\\555m', spectrum(self_cmd, sele) ],
[ 0, '' , '' ],
[ 1, 'auto', color_auto(self_cmd, sele) ],
[ 0, '' , '' ],
] +
all_colors(self_cmd, sele))
def general_color(self_cmd, sele):
return [[ 2, 'Color:' ,'' ]] + all_colors(self_cmd, sele)
def preset_ligand_sites(self_cmd, sele):
return [[ 2, 'Ligand Sites:', ''],
[ 1, 'cartoon' , 'preset.ligand_cartoon("'+sele+'",_self=cmd)' ],
[ 0, '', ''],
[ 1, 'solid surface' , 'preset.ligand_sites("'+sele+'",_self=cmd)' ],
[ 1, 'solid (better)' , 'preset.ligand_sites_hq("'+sele+'",_self=cmd)' ],
[ 0, '', ''],
[ 1, 'transparent surface' , 'preset.ligand_sites_trans("'+sele+'",_self=cmd)' ],
[ 1, 'transparent (better)' , 'preset.ligand_sites_trans_hq("'+sele+'",_self=cmd)' ],
[ 0, '', ''],
[ 1, 'dot surface' , 'preset.ligand_sites_dots("'+sele+'",_self=cmd)' ],
[ 0, '', ''],
[ 1, 'mesh surface' , 'preset.ligand_sites_mesh("'+sele+'",_self=cmd)' ]]
def presets(self_cmd, sele):
return [[ 2, 'Preset:' ,'' ],
[ 1, 'simple' ,'preset.simple("'+sele+'",_self=cmd)' ],
[ 1, 'simple (no solvent)' ,'preset.simple_no_solv("'+sele+'",_self=cmd)' ],
[ 1, 'ball and stick' , 'preset.ball_and_stick("'+sele+'",_self=cmd)' ],
[ 1, 'b factor putty' , 'preset.b_factor_putty("'+sele+'",_self=cmd)' ],
[ 1, 'technical' , 'preset.technical("'+sele+'",_self=cmd)' ],
[ 1, 'ligands' , 'preset.ligands("'+sele+'",_self=cmd)' ],
[ 1, 'ligand sites' , preset_ligand_sites(self_cmd, sele) ],
[ 1, 'pretty ', 'preset.pretty("'+sele+'",_self=cmd)' ],
[ 1, 'pretty (with solvent)' , 'preset.pretty_solv("'+sele+'",_self=cmd)' ],
[ 1, 'publication ' , 'preset.publication("'+sele+'",_self=cmd)' ],
[ 1, 'publication (with solvent)' , 'preset.pub_solv("'+sele+'",_self=cmd)' ],
[ 0, '' ,'' ],
[ 1, 'default' ,'preset.default("'+sele+'",_self=cmd)' ],
]
def hydrogens(self_cmd, sele):
return [[ 2, 'Hydrogens:' ,'' ],
[ 1, 'add' ,'cmd.h_add("'+sele+'")' ],
[ 1, 'remove' ,'cmd.remove("('+sele+') and hydro")' ],
]
def state(self_cmd, sele):
return [[ 2, 'State:' ,'' ],
[ 1, 'freeze' ,'cmd.set("state",cmd.get_state(),"'+sele+'")' ],
[ 1, 'thaw' ,'cmd.set("state",cmd.get("state","'+sele+'"));cmd.unset("state","'+sele+'")' ],
]
def movement(self_cmd, sele):
return [[ 2, 'Movement:' ,'' ],
[ 1, 'protect' ,'cmd.protect("'+sele+'")' ],
[ 1, 'deprotect' ,'cmd.deprotect("'+sele+'")' ],
]
def sequence(self_cmd, sele):
return [[ 2, 'Sequence:' ,'' ],
[ 1, 'include' ,'cmd.set("seq_view","on","'+sele+'")' ],
[ 1, 'exclude' ,'cmd.set("seq_view","off","'+sele+'")' ],
[ 0, '' ,'' ],
[ 1, 'default' ,'cmd.unset("seq_view","'+sele+'")' ],
]
def masking(self_cmd, sele):
return [[ 2, 'Masking:' ,'' ],
[ 1, 'mask' ,'cmd.mask("'+sele+'")' ],
[ 1, 'unmask' ,'cmd.unmask("'+sele+'")' ],
]
def compute(self_cmd, sele):
return [[ 2, 'Compute:', '' ],
[ 1, 'atom count' ,'cmd.count_atoms("'+sele+'",quiet=0)' ],
[ 1, 'charges',
[[ 2, 'Charge:', ''],
[ 1, 'formal charge sum' ,'util.sum_formal_charges("'+sele+'",quiet=0,_self=cmd)' ],
[ 1, 'partial charges sum' ,'util.sum_partial_charges("'+sele+'",quiet=0,_self=cmd)' ],
]],
[ 1, 'surface area',
[[ 2, 'Surface Area Type:', ''],
[ 1, 'molecular', 'cmd.get_area("'+sele+'",quiet=0,_self=cmd)' ],
[ 1, 'solvent accessible', 'util.get_sasa(sele="'+sele+'",_self=cmd)' ],
]],
[ 1, 'molecular weight',
[[ 2, 'Molecular Weight:', ''],
[ 1, 'explicit', 'util.compute_mass("'+sele+'",implicit=False,quiet=0,_self=cmd)' ],
[ 1, 'with missing hydrogens', 'util.compute_mass("'+sele+'",implicit=True,quiet=0,_self=cmd)' ],
]],
]
def vacuum(self_cmd, sele):
return [[ 2, 'Vacuum Electrostatics:' ,'' ],
# [ 2, '\\955WARNING:\\595 Unvalidated and experimental code!', '' ],
[ 1, 'protein contact potential (local)', 'util.protein_vacuum_esp("'+sele+'",mode=2,quiet=0,_self=cmd)' ],
# [ 1, 'protein surface potential (absolute)', 'util.protein_vacuum_esp("'+sele+'",mode=0,quiet=0,_self=cmd)' ],
# [ 1, 'protein surface potential (relative)', 'util.protein_vacuum_esp("'+sele+'",mode=1,quiet=0,_self=cmd)' ],
[ 2, '\\955NOTE:\\559 Due to short cutoffs, truncations, and', ''],
[ 2, '\\559lack of solvent "screening", these computed ', ''],
[ 2, '\\559potentials are only qualitatively useful.', ''],
[ 2, '\\559Please view with skepticism!', '' ],
]
def symmetry(self_cmd, sele):
return [[ 2, 'Symmetry Mates:' ,'' ],
[ 2, '\\955 +/- one unit cell and...', '' ],
[ 1, 'within 4 A', 'cmd.symexp("'+sele+'_","'+sele+'","'+sele+'",cutoff=4,segi=1)' ],
[ 1, 'within 5 A', 'cmd.symexp("'+sele+'_","'+sele+'","'+sele+'",cutoff=5,segi=1)' ],
[ 1, 'within 6 A', 'cmd.symexp("'+sele+'_","'+sele+'","'+sele+'",cutoff=6,segi=1)' ],
[ 1, 'within 8 A', 'cmd.symexp("'+sele+'_","'+sele+'","'+sele+'",cutoff=8,segi=1)' ],
[ 1, 'within 12 A', 'cmd.symexp("'+sele+'_","'+sele+'","'+sele+'",cutoff=12,segi=1)' ],
[ 1, 'within 20 A', 'cmd.symexp("'+sele+'_","'+sele+'","'+sele+'",cutoff=20,segi=1)' ],
[ 1, 'within 50 A', 'cmd.symexp("'+sele+'_","'+sele+'","'+sele+'",cutoff=50,segi=1)' ],
[ 1, 'within 100 A', 'cmd.symexp("'+sele+'_","'+sele+'","'+sele+'",cutoff=100,segi=1)' ],
[ 1, 'within 250 A', 'cmd.symexp("'+sele+'_","'+sele+'","'+sele+'",cutoff=250,segi=1)' ],
[ 1, 'within 1000 A', 'cmd.symexp("'+sele+'_","'+sele+'","'+sele+'",cutoff=1000,segi=1)' ]]
def mol_assign(self_cmd, sele):
return [[ 2, 'Assign:' ,'' ],
[ 1, 'Amber 99 atomic properties', 'util.assign_amber99("'+sele+'",_self=cmd)' ],
]
def selection(self_cmd, sele):
return [[ 2, 'Selections:', '' ],
[ 1, 'all', 'cmd.select("'+sele+'_all","'+sele+'")'],
[ 1, 'polymer', 'cmd.select("'+sele+'_polymer","('+sele+') and polymer")'],
[ 1, 'organic', 'cmd.select("'+sele+'_organic","('+sele+') and organic")'],
[ 1, 'solvent', 'cmd.select("'+sele+'_solvent","('+sele+') and solvent")'],
[ 1, 'polar hydrogens', 'cmd.select("'+sele+'_polar_h","('+sele+') and (e. H and bound_to e. S+O+N)")'],
[ 1, 'non-polar hydrogens', 'cmd.select("'+sele+'_npolar_h","('+sele+') and (e. H and (not bound_to e. S+O+N))")'],
[ 1, 'donors', 'cmd.select("'+sele+'_donors","('+sele+') and hbd")'],
[ 1, 'acceptors', 'cmd.select("'+sele+'_acceptors","('+sele+') and hba")'],
[ 1, 'surface atoms', 'util.find_surface_atoms(sele="'+sele+'", _self=cmd)' ],
]
def mol_generate(self_cmd, sele):
return [[ 2, 'Generate:' ,'' ],
[ 1, 'selection', selection(self_cmd, sele) ],
[ 1, 'symmetry mates', symmetry(self_cmd, sele) ],
[ 1, 'vacuum electrostatics', vacuum(self_cmd, sele) ],
# [ 1, 'assign', mol_assign(self_cmd, sele) ],
]
def invert(self_cmd, sele):
return [[ 2, 'Invert:' ,'' ],
[ 1, 'within object(s)' ,'cmd.select("'+sele+'","((byobj '+sele+') and not '+sele+')",enable=1)' ],
[ 1, 'within segment(s)' ,'cmd.select("'+sele+'","((byseg '+sele+') and not '+sele+')",enable=1)' ],
[ 1, 'within chain(s)' ,'cmd.select("'+sele+'","((bychain '+sele+') and not '+sele+')",enable=1)' ],
[ 1, 'within residue(s)' ,'cmd.select("'+sele+'","((byres '+sele+') and not '+sele+')",enable=1)' ],
[ 0, '' ,'' ],
[ 1, 'within molecule(s)' ,'cmd.select("'+sele+'","((bymol '+sele+') and not '+sele+')",enable=1)' ],
[ 0, '' ,'' ],
[ 1, 'within any' ,'cmd.select("'+sele+'","(not '+sele+')",enable=1)' ],
]
def complete(self_cmd, sele):
return [[ 2, 'Complete:' ,'' ],
[ 1, 'residues' ,'cmd.select("'+sele+'","(byres '+sele+')",enable=1)' ],
[ 1, 'chains' ,'cmd.select("'+sele+'","(bychain '+sele+')",enable=1)' ],
[ 1, 'segments' ,'cmd.select("'+sele+'","(byseg '+sele+')",enable=1)' ],
[ 1, 'objects' ,'cmd.select("'+sele+'","(byobj '+sele+')",enable=1)' ],
[ 0, '' ,'' ],
[ 1, 'molecules' ,'cmd.select("'+sele+'","(bymol '+sele+')",enable=1)' ],
[ 0, '' ,'' ],
[ 1, 'C-alphas' ,'cmd.select("'+sele+'","(bycalpha '+sele+')",enable=1)' ],
]
def modify_by_object(self_cmd, sele, op):
list = self_cmd.get_names("public_objects",1)[0:25] # keep this practical
list = filter(lambda x:self_cmd.get_type(x)=="object:molecule",list)
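# Python 2 semantics: filter() returns a list here; Python 3 would need
# list(filter(...)).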
result = [[ 2, 'Object:', '']]
for a in list:
if a!=sele:
result.append([1,a,
'cmd.select("'+sele+'","('+sele+') '+op+' ('+a+')",enable=1)'])
return result
def modify_by_sele(self_cmd, sele, op):
list = self_cmd.get_names("public_selections",0)[0:25] # keep this practical
result = [[ 2, 'Selection:', '']]
for a in list:
if a!=sele:
result.append([1,a, 'cmd.select("'+sele+'","('+sele+') '+op+' ('+a+')",enable=1)'])
return result
def restrict(self_cmd, sele):
return [[ 2, 'Restrict:' ,'' ],
[ 1, 'to object' , modify_by_object(self_cmd, sele,'and') ],
[ 1, 'to selection' , modify_by_sele(self_cmd, sele,'and') ],
[ 0, '' ,'' ],
[ 1, 'to visible' , 'cmd.select("'+sele+'","('+sele+') and vis",enable=1)'],
[ 0, '' ,'' ],
[ 1, 'to polymer' , 'cmd.select("'+sele+'","('+sele+') and polymer",enable=1)'],
[ 1, 'to solvent' , 'cmd.select("'+sele+'","('+sele+') and solvent",enable=1)'],
[ 1, 'to organic' , 'cmd.select("'+sele+'","('+sele+') and organic",enable=1)'],
[ 1, 'to inorganic' , 'cmd.select("'+sele+'","('+sele+') and inorganic",enable=1)'],
]
def include(self_cmd, sele):
return [[ 2, 'Include:' ,'' ],
[ 1, 'object' , modify_by_object(self_cmd, sele,'or') ],
[ 1, 'selection' , modify_by_sele(self_cmd, sele,'or') ],
[ 0, '' ,'' ],
[ 1, 'visible' , 'cmd.select("'+sele+'","('+sele+') or vis",enable=1)'],
]
def exclude(self_cmd, sele):
return [[ 2, 'Exclude:' ,'' ],
[ 1, 'object' , modify_by_object(self_cmd, sele,'and not') ],
[ 1, 'selection' , modify_by_sele(self_cmd, sele,'and not') ],
[ 0, '' ,'' ],
[ 1, 'polymer' , 'cmd.select("'+sele+'","('+sele+') and not polymer",enable=1)'],
[ 1, 'solvent' , 'cmd.select("'+sele+'","('+sele+') and not solvent",enable=1)'],
[ 1, 'organic' , 'cmd.select("'+sele+'","('+sele+') and not organic",enable=1)'],
[ 1, 'inorganic' , 'cmd.select("'+sele+'","('+sele+') and not inorganic",enable=1)'],
]
def expand(self_cmd, sele):
return [[ 2, 'Expand:' ,'' ],
[ 1, 'by 4 A' ,'cmd.select("'+sele+'","('+sele+' expand 4)",enable=1)' ],
[ 1, 'by 5 A' ,'cmd.select("'+sele+'","('+sele+' expand 5)",enable=1)' ],
[ 1, 'by 6 A' ,'cmd.select("'+sele+'","('+sele+' expand 6)",enable=1)' ],
[ 1, 'by 8 A' ,'cmd.select("'+sele+'","('+sele+' expand 8)",enable=1)' ],
[ 1, 'by 12 A' ,'cmd.select("'+sele+'","('+sele+' expand 12)",enable=1)' ],
[ 1, 'by 20 A' ,'cmd.select("'+sele+'","('+sele+' expand 20)",enable=1)' ],
[ 0, '' ,'' ],
[ 1, 'by 4 A, residues' ,'cmd.select("'+sele+'","(byres ('+sele+' expand 4))",enable=1)' ],
[ 1, 'by 5 A, residues' ,'cmd.select("'+sele+'","(byres ('+sele+' expand 5))",enable=1)' ],
[ 1, 'by 6 A, residues' ,'cmd.select("'+sele+'","(byres ('+sele+' expand 6))",enable=1)' ],
[ 1, 'by 8 A, residues' ,'cmd.select("'+sele+'","(byres ('+sele+' expand 8))",enable=1)' ],
[ 1, 'by 12 A, residues' ,'cmd.select("'+sele+'","(byres ('+sele+' expand 12))",enable=1)' ],
[ 1, 'by 20 A, residues' ,'cmd.select("'+sele+'","(byres ('+sele+' expand 20))",enable=1)' ],
]
def around(self_cmd, sele):
return [[ 2, 'Around:' ,'' ],
[ 1, 'atoms within 4 A' ,'cmd.select("'+sele+'","('+sele+' around 4)",enable=1)' ],
[ 1, 'atoms within 5 A' ,'cmd.select("'+sele+'","('+sele+' around 5)",enable=1)' ],
[ 1, 'atoms within 6 A' ,'cmd.select("'+sele+'","('+sele+' around 6)",enable=1)' ],
[ 1, 'atoms within 8 A' ,'cmd.select("'+sele+'","('+sele+' around 8)",enable=1)' ],
[ 1, 'atoms within 12 A' ,'cmd.select("'+sele+'","('+sele+' around 12)",enable=1)' ],
[ 1, 'atoms within 20 A' ,'cmd.select("'+sele+'","('+sele+' around 20)",enable=1)' ],
[ 0, '' ,'' ],
[ 1, 'residues within 4 A' ,'cmd.select("'+sele+'","(byres ('+sele+' around 4))",enable=1)' ],
[ 1, 'residues within 5 A' ,'cmd.select("'+sele+'","(byres ('+sele+' around 5))",enable=1)' ],
[ 1, 'residues within 6 A' ,'cmd.select("'+sele+'","(byres ('+sele+' around 6))",enable=1)' ],
[ 1, 'residues within 8 A' ,'cmd.select("'+sele+'","(byres ('+sele+' around 8))",enable=1)' ],
[ 1, 'residues within 12 A' ,'cmd.select("'+sele+'","(byres ('+sele+' around 12))",enable=1)' ],
[ 1, 'residues within 20 A' ,'cmd.select("'+sele+'","(byres ('+sele+' around 20))",enable=1)' ],
]
def extend(self_cmd, sele):
return [[ 2, 'Extend:' ,'' ],
[ 1, 'by 1 bond' ,'cmd.select("'+sele+'","('+sele+' extend 1)",enable=1)' ],
[ 1, 'by 2 bonds' ,'cmd.select("'+sele+'","('+sele+' extend 2)",enable=1)' ],
[ 1, 'by 3 bonds' ,'cmd.select("'+sele+'","('+sele+' extend 3)",enable=1)' ],
[ 1, 'by 4 bonds' ,'cmd.select("'+sele+'","('+sele+' extend 4)",enable=1)' ],
[ 1, 'by 5 bonds' ,'cmd.select("'+sele+'","('+sele+' extend 5)",enable=1)' ],
[ 1, 'by 6 bonds' ,'cmd.select("'+sele+'","('+sele+' extend 6)",enable=1)' ],
[ 0, '' ,'' ],
[ 1, 'by 1 bond, residues' ,'cmd.select("'+sele+'","(byres ('+sele+' extend 1))",enable=1)' ],
[ 1, 'by 2 bonds, residues' ,'cmd.select("'+sele+'","(byres ('+sele+' extend 2))",enable=1)' ],
[ 1, 'by 3 bonds, residues' ,'cmd.select("'+sele+'","(byres ('+sele+' extend 3))",enable=1)' ],
[ 1, 'by 4 bonds, residues' ,'cmd.select("'+sele+'","(byres ('+sele+' extend 4))",enable=1)' ],
[ 1, 'by 5 bonds, residues' ,'cmd.select("'+sele+'","(byres ('+sele+' extend 5))",enable=1)' ],
[ 1, 'by 6 bonds, residues' ,'cmd.select("'+sele+'","(byres ('+sele+' extend 6))",enable=1)' ],
]
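# cmd.dist(...) with mode=2 (used throughout polar() below) measures only
# likely polar contacts (hydrogen bonds) instead of all pairwise distances.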
def polar(self_cmd, sele):
return [[ 2, 'Polar Contacts:', ''],
[ 1, 'within selection' ,
'cmd.dist("'+sele+'_polar_conts","'+sele+'","'+sele+'",quiet=1,mode=2,label=0,reset=1);cmd.enable("'+sele+'_polar_conts")'],
[ 1, 'involving side chains' ,
'cmd.dist("'+sele+'_polar_conts","('+sele+')","('+sele+
') and polymer and not (name n,o,h)",quiet=1,mode=2,label=0,reset=1);cmd.enable("'+sele+'_polar_conts")'],
[ 1, 'involving solvent' ,
'cmd.dist("'+sele+'_polar_conts","('+sele+') and solvent","('+sele+
') and not (solvent)",quiet=1,mode=2,label=0,reset=1);cmd.enable("'+sele+'_polar_conts")'],
[ 1, 'excluding solvent' ,
'cmd.dist("'+sele+'_polar_conts","('+sele+') and not (solvent)","('+sele+
') and not (solvent)",quiet=1,mode=2,label=0,reset=1);cmd.enable("'+sele+'_polar_conts")'],
[ 1, 'excluding main chain' ,
'cmd.dist("'+sele+'_polar_conts","('+sele+') and not (polymer and name n,o,h)","('+sele+
') and not (polymer and name n,o,h)",quiet=1,mode=2,label=0,reset=1);cmd.enable("'+sele+'_polar_conts")'],
[ 1, 'excluding intra-main chain' ,
'cmd.dist("'+sele+'_polar_conts","('+sele+')","('+sele+
') and not (polymer and name n,o,h)",quiet=1,mode=2,label=0,reset=1);cmd.enable("'+sele+'_polar_conts")'],
[ 1, 'just intra-side chain' ,
'cmd.dist("'+sele+'_polar_conts","('+sele+') and not (solvent or (polymer and name n,o,h))","('+sele+
') and not (solvent or (polymer and name n,o,h))",quiet=1,mode=2,label=0,reset=1);cmd.enable("'+sele+
'_polar_conts")'],
[ 1, 'just intra-main chain' ,
'cmd.dist("'+sele+'_polar_conts","('+sele+') and not (solvent or (polymer and not name n,o,h))","('+sele+
') and not (solvent or (polymer and not name n,o,h))",quiet=1,mode=2,label=0,reset=1);cmd.enable("'+sele+
'_polar_conts")'],
[ 0, '', '' ],
[ 1, 'to other atoms in object',
'cmd.dist("'+sele+'_polar_conts","('+sele+')","(byobj ('+sele+')) and (not ('+sele+
'))",quiet=1,mode=2,label=0,reset=1);cmd.enable("'+sele+'_polar_conts")'],
[ 1, 'to others excluding solvent',
'cmd.dist("'+sele+'_polar_conts","('+sele+') and not solvent","(byobj ('+sele+')) and (not ('+sele+
')) and (not solvent)",quiet=1,mode=2,label=0,reset=1);cmd.enable("'+sele+'_polar_conts")'],
[ 1, 'to any atoms',
'cmd.dist("'+sele+'_polar_conts","('+sele+')","(not '+sele+
')",quiet=1,mode=2,label=0,reset=1);cmd.enable("'+sele+'_polar_conts")'],
[ 1, 'to any excluding solvent',
'cmd.dist("'+sele+'_polar_conts","('+sele+') and not solvent","(not ('+sele+
')) and not solvent",quiet=1,mode=2,label=0,reset=1);cmd.enable("'+sele+'_polar_conts")'],
]
def polar_inter(self_cmd, sele):
return [[ 2, 'Polar Contacts:', ''],
]
def find(self_cmd, sele):
return [[ 2, 'Find:', ''],
[ 1, 'polar contacts', polar(self_cmd, sele) ],
]
def align_to_object(self_cmd, sele):
list = self_cmd.get_names("public_objects",1)[0:25] # keep this practical
list = filter(lambda x:self_cmd.get_type(x)=="object:molecule",list)
result = [[ 2, 'Object:', '']]
for a in list:
if a!=sele:
result.append([1,a,
'cmd.align("polymer and name ca and ('+sele+')",'+
'"polymer and name ca and ('+a+')",quiet=0,'+
'object="aln_%s_to_%s",reset=1)'%(sele,a)])
return result
def align_to_sele(self_cmd, sele):
list = self_cmd.get_names("public_selections",0)[0:25] # keep this practical
result = [[ 2, 'Selection:', '']]
for a in list:
if a!= sele:
result.append([1,a,
'cmd.align("polymer and name ca and ('+sele+')",'+
'"polymer and name ca and ('+a+')",quiet=0,'+
'object="aln_%s_to_%s",reset=1)'%(sele,a)])
return result
def mat_tran(self_cmd, sele, direction=0):
list = self_cmd.get_names("public_objects",1)[0:25] # keep this practical
list = filter(lambda x:self_cmd.get_type(x)=="object:molecule",list)
result = [[ 2, 'Object:', '']]
for a in list:
if a!=sele:
if direction:
result.append([1,a,
'cmd.matrix_transfer("'+a+'","'+sele+'");'])
else:
result.append([1,a,
'cmd.matrix_transfer("'+sele+'","'+a+'");'])
return result
def sele_align(self_cmd, sele):
return [[ 2, 'Align:', ''],
[ 1, 'to molecule', align_to_object(self_cmd, sele) ],
[ 1, 'to selection', align_to_sele(self_cmd, sele) ],
[ 0, '', None ],
[ 1, 'enabled to this', 'util.mass_align("'+sele+'",1,_self=cmd)' ],
[ 1, 'all to this', 'util.mass_align("'+sele+'",0,_self=cmd)' ],
[ 0, '', None ],
[ 1, 'states (*/ca)', 'cmd.intra_fit("('+sele+') and name ca")' ],
[ 1, 'states', 'cmd.intra_fit("'+sele+'")' ],
]
def mol_align(self_cmd, sele):
return [[ 2, 'Align:', ''],
[ 1, 'to molecule', align_to_object(self_cmd, sele) ],
[ 1, 'to selection', align_to_sele(self_cmd, sele) ],
[ 0, '', None ],
[ 1, 'enabled to this', 'util.mass_align("'+sele+'",1,_self=cmd)' ],
[ 1, 'all to this', 'util.mass_align("'+sele+'",0,_self=cmd)' ],
[ 0, '', None ],
[ 1, 'states (*/ca)', 'cmd.intra_fit("('+sele+') and name ca")' ],
[ 1, 'states', 'cmd.intra_fit("'+sele+'")' ],
[ 0, '', None ],
[ 1, 'matrix from', mat_tran(self_cmd, sele,1) ],
[ 1, 'matrix to', mat_tran(self_cmd, sele,0) ],
[ 1, 'matrix reset', 'cmd.matrix_reset("'+sele+'")'],
]
def modify_sele(self_cmd, sele):
return [[ 2, 'Modify:', ''],
[ 1, 'around' , around(self_cmd, sele) ],
[ 1, 'expand' , expand(self_cmd, sele) ],
[ 1, 'extend' , extend(self_cmd, sele) ],
[ 1, 'invert' , invert(self_cmd, sele) ],
[ 1, 'complete' , complete(self_cmd, sele) ],
[ 1, 'restrict' , restrict(self_cmd, sele) ],
[ 1, 'include' , include(self_cmd, sele) ],
[ 1, 'exclude' , exclude(self_cmd, sele) ]]
def sele_action(self_cmd, sele):
return [[ 2, 'Action:' ,'' ],
[ 1, 'delete selection', 'cmd.delete("'+sele+'")' ],
[ 1, 'rename selection', 'cmd.wizard("renaming","'+sele+'")' ],
[ 0, '' ,'' ],
[ 1, 'zoom' ,'cmd.zoom("'+sele+'",animate=-1)' ],
[ 1, 'orient' ,'cmd.orient("'+sele+'",animate=-1)' ],
[ 1, 'center' ,'cmd.center("'+sele+'",animate=-1)' ],
[ 1, 'origin' ,'cmd.origin("'+sele+'")' ],
[ 0, '' ,'' ],
[ 1, 'drag coordinates' , 'cmd.drag("'+sele+'")' ],
[ 1, 'clean' , 'cmd.clean("'+sele+'")' ],
[ 0, '' ,'' ],
[ 1, 'modify', modify_sele(self_cmd, sele) ],
[ 1, 'preset' ,presets(self_cmd, sele) ],
[ 1, 'find', find(self_cmd, sele) ],
[ 1, 'align', sele_align(self_cmd, sele) ],
[ 0, '' ,'' ],
[ 1, 'remove atoms' ,'cmd.remove("'+sele+'");cmd.delete("'+sele+'")' ],
[ 0, '' ,'' ],
[ 1, 'duplicate' ,'cmd.select(None,"'+sele+'")' ], # broken...
[ 1, 'copy to object' ,'cmd.create(None,"'+sele+'",zoom=0)' ],
[ 1, 'extract object' ,'cmd.extract(None,"'+sele+'",zoom=0)' ],
[ 0, '' ,'' ],
[ 1, 'masking' , masking(self_cmd, sele) ],
[ 1, 'movement' , movement(self_cmd, sele) ],
[ 1, 'compute' , compute(self_cmd, sele) ],
]
def sele_action2(self_cmd, sele):
return [[ 2, 'Action:' ,'' ],
[ 1, 'delete selection', 'cmd.delete("'+sele+'")' ],
[ 1, 'rename selection', 'cmd.wizard("renaming","'+sele+'")' ],
[ 0, '' ,'' ],
[ 1, 'preset' ,presets(self_cmd, sele) ],
[ 1, 'find', find(self_cmd, sele) ],
[ 0, '' ,'' ],
[ 1, 'remove atoms' ,'cmd.remove("'+sele+'");cmd.delete("'+sele+'")' ],
[ 0, '' ,'' ],
[ 1, 'around' , around(self_cmd, sele) ],
[ 1, 'expand' , expand(self_cmd, sele) ],
[ 1, 'extend' , extend(self_cmd, sele) ],
[ 1, 'invert' , invert(self_cmd, sele) ],
[ 1, 'complete' , complete(self_cmd, sele) ],
[ 0, '' ,'' ],
[ 1, 'duplicate selection' ,'cmd.select(None,"'+sele+'")' ],
[ 1, 'copy to object' ,'cmd.create(None,"'+sele+'",zoom=0)' ],
[ 1, 'extract object' ,'cmd.extract(None,"'+sele+'",zoom=0)' ],
[ 0, '' ,'' ],
[ 1, 'masking' , masking(self_cmd, sele) ],
[ 1, 'movement' , movement(self_cmd, sele) ],
[ 1, 'compute' , compute(self_cmd, sele) ],
]
def group_action(self_cmd, sele):
return [[ 2, 'Action:' , '' ],
[ 1, 'zoom' , 'cmd.zoom("'+sele+'",animate=-1)' ],
[ 1, 'orient' , 'cmd.orient("'+sele+'",animate=-1)' ],
[ 1, 'center' ,'cmd.center("'+sele+'",animate=-1)' ],
[ 1, 'origin' , 'cmd.origin("'+sele+'")' ],
[ 0, '' ,'' ],
[ 1, 'drag' , 'cmd.drag("'+sele+'")' ],
[ 1, 'reset' , 'cmd.reset(object="'+sele+'")' ],
[ 0, '' ,'' ],
[ 1, 'preset' , presets(self_cmd, sele) ],
[ 1, 'find', find(self_cmd, sele) ],
[ 1, 'align', mol_align(self_cmd, sele) ],
[ 1, 'generate' , mol_generate(self_cmd, sele) ],
[ 0, '' ,'' ],
[ 1, 'assign sec. struc.' ,'cmd.dss("'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'rename group', 'cmd.wizard("renaming","'+sele+'")' ],
[ 1, 'delete group' , 'cmd.delete("'+sele+'")' ],
[ 0, '' ,'' ],
[ 1, 'hydrogens' , hydrogens(self_cmd, sele) ],
[ 1, 'remove waters' ,'cmd.remove("(solvent and ('+sele+'))")' ],
[ 0, '' ,'' ],
[ 1, 'state' , state(self_cmd, sele) ],
[ 1, 'masking' , masking(self_cmd, sele) ],
[ 1, 'sequence' , sequence(self_cmd, sele) ],
[ 1, 'movement' , movement(self_cmd, sele) ],
[ 1, 'compute' , compute(self_cmd, sele) ],
]
def mol_action(self_cmd, sele):
return [[ 2, 'Action:' , '' ],
[ 1, 'zoom' , 'cmd.zoom("'+sele+'",animate=-1)' ],
[ 1, 'orient' , 'cmd.orient("'+sele+'",animate=-1)' ],
[ 1, 'center' ,'cmd.center("'+sele+'",animate=-1)' ],
[ 1, 'origin' , 'cmd.origin("'+sele+'")' ],
[ 0, '' ,'' ],
[ 1, 'drag matrix' , 'cmd.drag("'+sele+'")' ],
[ 1, 'reset matrix' , 'cmd.reset(object="'+sele+'")' ],
[ 0, '' ,'' ],
[ 1, 'drag coordinates' , 'cmd.drag("('+sele+')")' ],
[ 1, 'clean' , 'cmd.clean("'+sele+'")' ],
[ 0, '' ,'' ],
[ 1, 'preset' , presets(self_cmd, sele) ],
[ 1, 'find', find(self_cmd, sele) ],
[ 1, 'align', mol_align(self_cmd, sele) ],
[ 1, 'generate' , mol_generate(self_cmd, sele) ],
[ 0, '' ,'' ],
[ 1, 'assign sec. struc.' ,'cmd.dss("'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'rename object', 'cmd.wizard("renaming","'+sele+'")' ],
[ 1, 'duplicate object' ,'cmd.create(None,"'+sele+'")' ],
[ 1, 'delete object' , 'cmd.delete("'+sele+'")' ],
[ 0, '' ,'' ],
[ 1, 'hydrogens' , hydrogens(self_cmd, sele) ],
[ 1, 'remove waters' ,'cmd.remove("(solvent and ('+sele+'))")' ],
[ 0, '' ,'' ],
[ 1, 'state' , state(self_cmd, sele) ],
[ 1, 'masking' , masking(self_cmd, sele) ],
[ 1, 'sequence' , sequence(self_cmd, sele) ],
[ 1, 'movement' , movement(self_cmd, sele) ],
[ 1, 'compute' , compute(self_cmd, sele) ],
]
def slice_action(self_cmd, sele):
return [[ 2, 'Action:' , '' ],
[ 1, 'zoom' , 'cmd.zoom("'+sele+'",animate=-1)' ],
[ 1, 'center' , 'cmd.center("'+sele+'",animate=-1)' ],
[ 1, 'origin' , 'cmd.origin("'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'tracking on' , 'cmd.set("slice_track_camera",1,"'+sele+'")' ],
[ 1, 'tracking off' , 'cmd.set("slice_track_camera",0,"'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'height map on' , 'cmd.set("slice_height_map",1,"'+sele+'")' ],
[ 1, 'height map off', 'cmd.set("slice_height_map",0,"'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'dynamic grid on' , 'cmd.set("slice_dynamic_grid",1,"'+sele+'")' ],
[ 1, 'dynamic grid off', 'cmd.set("slice_dynamic_grid",0,"'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'rename' , 'cmd.wizard("renaming","'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'delete' , 'cmd.delete("'+sele+'")' ],
]
def simple_action(self_cmd, sele):
return [[ 2, 'Action:' , '' ],
[ 1, 'zoom' , 'cmd.zoom("'+sele+'",animate=-1)' ],
[ 1, 'center' , 'cmd.center("'+sele+'",animate=-1)' ],
[ 1, 'origin' , 'cmd.origin("'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'drag' , 'cmd.drag("'+sele+'")' ],
[ 1, 'reset' , 'cmd.reset(object="'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'rename' , 'cmd.wizard("renaming","'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'delete' , 'cmd.delete("'+sele+'")' ],
]
def map_mesh(self_cmd, sele):
return [[ 2, 'Mesh:', '' ],
[ 1, '@ level 1.0' , 'cmd.isomesh("'+sele+'_mesh","'+sele+'",1.0)' ],
[ 0, '' , '' ],
[ 1, '@ level 2.0' , 'cmd.isomesh("'+sele+'_mesh","'+sele+'",2.0)' ],
[ 1, '@ level 3.0' , 'cmd.isomesh("'+sele+'_mesh","'+sele+'",3.0)' ],
[ 0, '' , '' ],
[ 1, '@ level 0.0' , 'cmd.isomesh("'+sele+'_mesh","'+sele+'",0.0)' ],
[ 1, '@ level -1.0' , 'cmd.isomesh("'+sele+'_mesh","'+sele+'",-1.0)' ],
[ 1, '@ level -2.0' , 'cmd.isomesh("'+sele+'_mesh","'+sele+'",-2.0)' ],
[ 1, '@ level -3.0' , 'cmd.isomesh("'+sele+'_mesh","'+sele+'",-3.0)' ],
]
def map_volume(self_cmd, sele):
return [[ 2, 'Volume:', ''],
[ 1, 'default' , 'cmd.volume("'+sele+'_volume","'+sele+'",1.0)' ]
]
def map_surface(self_cmd, sele):
return [[ 2, 'Surface:', '' ],
[ 1, '@ level 1.0' , 'cmd.isosurface("'+sele+'_surf","'+sele+'",1.0)' ],
[ 0, '' , '' ],
[ 1, '@ level 2.0' , 'cmd.isosurface("'+sele+'_surf","'+sele+'",2.0)' ],
[ 1, '@ level 3.0' , 'cmd.isosurface("'+sele+'_surf","'+sele+'",3.0)' ],
[ 0, '' , '' ],
[ 1, '@ level 0.0' , 'cmd.isosurface("'+sele+'_surf","'+sele+'",0.0)' ],
[ 1, '@ level -1.0' , 'cmd.isosurface("'+sele+'_surf","'+sele+'",-1.0)' ],
[ 1, '@ level -2.0' , 'cmd.isosurface("'+sele+'_surf","'+sele+'",-2.0)' ],
[ 1, '@ level -3.0' , 'cmd.isosurface("'+sele+'_surf","'+sele+'",-3.0)' ],
]
def map_gradient(self_cmd, sele):
return [[ 2, 'Gradient:', '' ],
[ 1, 'default' , 'cmd.gradient("'+sele+'_grad","'+sele+'");cmd.ramp_new("'+sele+
'_grad_ramp","'+sele+'");cmd.color("'+sele+'_grad_ramp","'+sele+'_grad");' ]
]
def map_slice(self_cmd, sele):
return [[ 2, 'Slice:', '' ],
[ 1, 'default' , 'cmd.slice_new("'+sele+'_slice","'+sele+'");cmd.ramp_new("'+sele+
'_slice_ramp","'+sele+'");cmd.color("'+sele+'_slice_ramp","'+sele+'_slice");'+
'cmd.set("slice_track_camera",1,"'+sele+'_slice");'+
'cmd.set("slice_dynamic_grid",1,"'+sele+'_slice")'],
]
def map_action(self_cmd, sele):
return [[ 2, 'Action:' , '' ],
[ 1, 'mesh' , map_mesh(self_cmd, sele) ],
[ 1, 'surface' , map_surface(self_cmd, sele) ],
[ 1, 'slice' , map_slice(self_cmd, sele) ],
[ 1, 'gradient' , map_gradient(self_cmd, sele) ],
[ 1, 'volume' , map_volume(self_cmd, sele) ],
[ 0, '' , '' ],
[ 1, 'zoom' , 'cmd.zoom("'+sele+'",animate=-1)' ],
[ 1, 'center' , 'cmd.center("'+sele+'",animate=-1)' ],
[ 1, 'origin' , 'cmd.origin("'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'drag' , 'cmd.drag("'+sele+'")' ],
[ 1, 'reset' , 'cmd.reset(object="'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'rename' , 'cmd.wizard("renaming","'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'delete' , 'cmd.delete("'+sele+'")' ],
]
def level(self_cmd, sele):
return [[ 2, 'Level', '' ],
[ 1, 'level 5.0' , 'cmd.isolevel("'+sele+'",5.0)' ],
[ 1, 'level 4.0' , 'cmd.isolevel("'+sele+'",4.0)' ],
[ 1, 'level 3.0' , 'cmd.isolevel("'+sele+'",3.0)' ],
[ 1, 'level 2.0' , 'cmd.isolevel("'+sele+'",2.0)' ],
[ 1, 'level 1.5' , 'cmd.isolevel("'+sele+'",1.5)' ],
[ 1, 'level 1.0' , 'cmd.isolevel("'+sele+'",1.0)' ],
[ 1, 'level 0.5' , 'cmd.isolevel("'+sele+'",0.5)' ],
[ 1, 'level 0.0' , 'cmd.isolevel("'+sele+'",0.0)' ],
[ 1, 'level -0.5' , 'cmd.isolevel("'+sele+'",-0.5)' ],
[ 1, 'level -1.0' , 'cmd.isolevel("'+sele+'",-1.0)' ],
            [ 1, 'level -1.5'   , 'cmd.isolevel("'+sele+'",-1.5)'          ],
[ 1, 'level -2.0' , 'cmd.isolevel("'+sele+'",-2.0)' ],
[ 1, 'level -3.0' , 'cmd.isolevel("'+sele+'",-3.0)' ],
[ 1, 'level -4.0' , 'cmd.isolevel("'+sele+'",-4.0)' ],
[ 1, 'level -5.0' , 'cmd.isolevel("'+sele+'",-5.0)' ],
]
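# Illustrative note (not part of the original module): each menu here is a
# list of [code, label, payload] triples, where code 2 marks a title row,
# 1 an active entry (payload is either a command string or a nested submenu
# list), and 0 a separator. A hypothetical custom submenu built the same way:
#
#     def my_custom(self_cmd, sele):   # "my_custom" is a made-up example name
#         return [[ 2, 'Custom:' , '' ],
#                 [ 1, 'zoom'    , 'cmd.zoom("'+sele+'",animate=-1)' ],
#                 [ 0, ''        , '' ],
#                 [ 1, 'delete'  , 'cmd.delete("'+sele+'")' ]]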
def surface_action(self_cmd, sele):
return [[ 2, 'Action:' , '' ],
[ 1, 'level' , level(self_cmd, sele) ],
[ 0, '' , '' ],
[ 1, 'zoom' , 'cmd.zoom("'+sele+'",animate=-1)' ],
[ 1, 'center' , 'cmd.center("'+sele+'",animate=-1)' ],
[ 1, 'origin' , 'cmd.origin("'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'drag' , 'cmd.drag("'+sele+'")' ],
[ 1, 'reset' , 'cmd.reset(object="'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'rename' , 'cmd.wizard("renaming","'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'delete' , 'cmd.delete("'+sele+'")' ],
]
def mesh_action(self_cmd, sele):
return [[ 2, 'Action:' , '' ],
[ 1, 'level' , level(self_cmd, sele) ],
[ 0, '' , '' ],
[ 1, 'zoom' , 'cmd.zoom("'+sele+'",animate=-1)' ],
[ 1, 'center' , 'cmd.center("'+sele+'",animate=-1)' ],
[ 1, 'origin' , 'cmd.origin("'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'drag' , 'cmd.drag("'+sele+'")' ],
[ 1, 'reset' , 'cmd.reset(object="'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'rename' , 'cmd.wizard("renaming","'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'delete' , 'cmd.delete("'+sele+'")' ],
]
def ramp_action(self_cmd, sele):
return [[ 2, 'Action:' , '' ],
[ 1, 'delete' , 'cmd.delete("'+sele+'")' ],
]
def test1(self_cmd, sele):
return [[ 2, 'Test1:' , '' ],
[ 1, 'zoom' , 'cmd.zoom("all",animate=-1)' ],
[ 1, 'center' , 'cmd.center("all",animate=-1)' ],
[ 1, 'origin' , 'cmd.origin("all")' ],
]
def test2(self_cmd, sele):
return [[ 2, 'Test2:' , '' ],
[ 1, 'zoom' , 'cmd.zoom("all",animate=-1)' ],
[ 1, 'center' , 'cmd.center("all",animate=-1)' ],
[ 1, 'origin' , 'cmd.origin("all")' ],
]
def all_action(self_cmd, sele):
return [[ 2, 'Action:' , '' ],
[ 1, 'zoom' , 'cmd.zoom("all",animate=-1)' ],
[ 1, 'center' , 'cmd.center("all",animate=-1)' ],
[ 1, 'origin' , 'cmd.origin("all")' ],
[ 0, '' , '' ],
[ 1, 'preset' , presets(self_cmd, "all") ],
[ 1, 'find', find(self_cmd, "all") ],
[ 0, '' ,'' ],
[ 1, 'hydrogens' ,hydrogens(self_cmd, sele) ],
# [ 1, 'add hydrogens' ,'cmd.h_add("'+sele+'")' ],
# [ 1, 'remove hydrogens' ,'cmd.remove("(hydro and ('+sele+'))")' ],
[ 1, 'remove waters' ,'cmd.remove("(solvent and ('+sele+'))")' ],
[ 0, '' , '' ],
           [ 1, 'delete selections' , 'list(map(cmd.delete,cmd.get_names("selections")))' ],
[ 0, '' ,'' ],
[ 1, 'delete everything' , 'cmd.delete("all")' ],
[ 0, '' ,'' ],
[ 1, 'masking' , masking(self_cmd, sele) ],
[ 1, 'movement' , movement(self_cmd, sele) ],
[ 1, 'compute' , compute(self_cmd, sele) ],
]
def label_props(self_cmd, sele):
return [[ 2, 'Other Properties:' ,'' ],
[ 1, 'formal charge' ,
'cmd.label("'+sele+'","\'%d\'%formal_charge")' ],
[ 0, '' , '' ],
[ 1, 'partial charge (0.00)' ,
'cmd.label("'+sele+'","\'%.2f\'%partial_charge")' ],
[ 1, 'partial charge (0.0000)' ,
'cmd.label("'+sele+'","\'%.4f\'%partial_charge")' ],
[ 0, '' , '' ],
[ 1, 'elec. radius' , 'cmd.label("'+sele+'","\'%1.2f\'%elec_radius")' ],
[ 0, '' , '' ],
[ 1, 'text type' , 'cmd.label("'+sele+'","text_type")' ],
[ 1, 'numeric type' , 'cmd.label("'+sele+'","numeric_type")' ],
[ 0, '' , '' ],
[ 1, 'stereochemistry', 'cmd.label("'+sele+'","stereo")' ]
]
def label_ids(self_cmd, sele):
return [[ 2, 'Atom Identifiers:' ,'' ],
[ 1, 'rank' , 'cmd.label("'+sele+'","rank")' ],
[ 1, 'ID' , 'cmd.label("'+sele+'","ID")' ],
[ 1, 'index' , 'cmd.label("'+sele+'","index")' ],
]
def mol_labels(self_cmd, sele):
return [[ 2, 'Label:' , '' ],
[ 1, 'clear' , 'cmd.label("'+sele+'","\'\'")' ],
[ 0, '' , '' ],
[ 1, 'residues' , """cmd.label('''(name """+self_cmd.get("label_anchor")+"""+C1*+C1' and (byres("""+sele+""")))''','''"%s-%s"%(resn,resi)''')""" ],
# [ 1, 'residues' , """cmd.label('''(name ca+C1*+C1' and (byres("""+sele+""")))''','''"%s-%s"%(resn,resi)''')""" ],
[ 1, 'chains' , 'util.label_chains("'+sele+'",_self=cmd)' ],
[ 1, 'segments' , 'util.label_segments("'+sele+'",_self=cmd)' ],
[ 0, '' , '' ],
[ 1, 'atom name' , 'cmd.label("'+sele+'","name")' ],
[ 1, 'element symbol' , 'cmd.label("'+sele+'","elem")' ],
[ 1, 'residue name' , 'cmd.label("'+sele+'","resn")' ],
[ 1, 'residue identifier' , 'cmd.label("'+sele+'","resi")' ],
[ 1, 'chain identifier' , 'cmd.label("'+sele+'","chain")' ],
[ 1, 'segment identifier' , 'cmd.label("'+sele+'","segi")' ],
[ 0, '' , '' ],
[ 1, 'b-factor' , 'cmd.label("'+sele+'","\'%1.2f\'%b")' ],
[ 1, 'occupancy' , 'cmd.label("'+sele+'","\'%1.2f\'%q")' ],
[ 1, 'vdw radius' , 'cmd.label("'+sele+'","\'%1.2f\'%vdw")' ],
[ 0, '' , '' ],
[ 1, 'other properties' , label_props(self_cmd, sele) ],
[ 0, '' , '' ],
[ 1, 'atom identifiers' , label_ids(self_cmd, sele) ],
]
def mol_view(self_cmd, sele):
return [
[ 1, 'zoom' ,'cmd.zoom("'+sele+'",animate=-1)' ],
[ 1, 'orient' ,'cmd.orient("'+sele+'",animate=-1)' ],
[ 1, 'center' ,'cmd.center("'+sele+'",animate=-1)' ],
[ 1, 'origin' ,'cmd.origin("'+sele+'")' ],
]
def all_option(self_cmd, sele):
return [
[ 2, '(all)' , '' ],
[ 1, 'show' , mol_show(self_cmd, sele) ],
[ 1, 'hide' , mol_hide(self_cmd, sele) ],
[ 1, 'color' , mol_color(self_cmd, sele) ],
# [ 1, 'view' , mol_view(self_cmd, sele) ],
[ 1, 'preset' , presets(self_cmd, sele) ],
[ 0, '' , '' ],
[ 1, 'zoom' ,'cmd.zoom("'+sele+'",animate=-1)' ],
[ 1, 'orient' ,'cmd.orient("'+sele+'",animate=-1)' ],
[ 1, 'center' ,'cmd.center("'+sele+'",animate=-1)' ],
[ 1, 'origin' ,'cmd.origin("'+sele+'")' ],
[ 1, 'select' ,'cmd.select("'+sele+'",enable=1,merge=2)' ],
[ 0, '' , '' ],
[ 1, 'label' , mol_labels(self_cmd, sele) ],
[ 0, '', '' ],
[ 1, 'enable' ,'cmd.enable("'+sele+'")' ],
[ 1, 'disable' ,'cmd.disable("'+sele+'")' ],
]
def enable_disable(self_cmd, enable):
if enable:
result = [[ 2, 'Enable', '' ]]
cmmd = 'cmd.enable("'
else:
result = [[ 2, 'Disable', '']]
cmmd = 'cmd.disable("'
    result = result + [[1, ob, cmmd + ob + '")'] for ob in ['all'] + self_cmd.get_names('objects')]
if not enable:
result.insert(2,[1, 'selections', "util.hide_sele(_self=cmd)"])
else:
result2 = [[ 2, 'Selections', '']]
return result
def scene_buttons(self_cmd):
return [[ 2, 'Buttons', '' ],
[ 1, 'on', 'cmd.set("scene_buttons")'],
[ 1, 'off', 'cmd.unset("scene_buttons")']]
def scene_main(self_cmd):
list = self_cmd.get_scene_list()
recall_list = [ [2, 'Scenes' , ''] ]
for entry in list:
recall_list.append([1,entry,'cmd.scene("""'+entry+'""")'])
return [
[ 2, 'Scene', '' ],
[ 1, 'next' , 'cmd.scene()' ],
[ 0, '' , '' ],
[ 1, 'append' , 'cmd.scene("new","append",quiet=0)' ],
[ 1, 'update' , 'cmd.scene("auto","update",quiet=0)' ],
[ 0, '' , '' ],
[ 1, 'recall' , recall_list ],
[ 0, '' , '' ],
[ 1, 'buttons', scene_buttons(self_cmd)] ]
def main_pseudoatom_sub(self_cmd,pos):
return [
[ 2, 'Pseudoatom' ,'' ],
[ 1, 'label' ,'cmd.wizard("pseudoatom","label",pos=[%1.7f,%1.7f,%1.7f])'%pos ],
[ 0, '' , '' ],
[ 1, 'single ', 'cmd.pseudoatom(pos=[%1.7f,%1.7f,%1.7f])'%pos ],
]
def main_pseudoatom(self_cmd,pos):
return [
[ 2, 'New' , '' ],
[ 1, 'pseudoatom' , main_pseudoatom_sub(self_cmd,pos) ],
]
def movie_panel(self_cmd):
return [[ 2, 'Panel', '' ],
[ 1, 'on', 'cmd.set("movie_panel")'],
[ 1, 'off', 'cmd.unset("movie_panel")']]
def movie_main(self_cmd):
return [
[ 2, 'Movie', ''],
[ 1, 'play', 'cmd.mplay()'],
[ 1, 'stop', 'cmd.mstop()'],
[ 0, '', '' ],
[ 1, 'rewind', 'cmd.rewind()'],
[ 0, '', '' ],
[ 1, 'panel', movie_panel(self_cmd) ]
]
def main_menu(self_cmd,pos):
return [
[ 2, 'Main Pop-Up' , '' ],
[ 1, 'new' , main_pseudoatom(self_cmd,pos) ],
[ 0, '' , '' ],
[ 1, 'zoom (vis)' ,'cmd.zoom("visible",animate=-1)' ],
[ 1, 'orient (vis)' ,'cmd.orient("visible",animate=-1)' ],
[ 1, 'center (vis)' ,'cmd.center("visible",animate=-1)' ],
[ 1, 'reset' ,'cmd.reset()' ],
[ 0, '' , '' ],
[ 1, 'movie' , movie_main(self_cmd) ],
[ 1, 'scene' , scene_main(self_cmd) ],
[ 0, '' , '' ],
[ 1, 'enable', enable_disable(self_cmd, 1) ],
[ 1, 'disable', enable_disable(self_cmd,0) ],
[ 0, '' , '' ],
[ 1, '(all)' , all_option(self_cmd,"all") ],
[ 1, '(visible)' , all_option(self_cmd,"visible") ],
[ 0, '' , '' ],
[ 1, 'ray' ,'cmd.ray()' ],
[ 0, '' , '' ],
[ 1, 'delete all' ,'cmd.delete("all")' ],
[ 1, 'reinitialize' ,'cmd.reinitialize()' ],
[ 1, 'quit' ,'cmd.quit()' ],
]
def pick_sele_sub(self_cmd, sele):
result = [
[ 2, 'Actions' , '' ],
[ 1, 'rename', 'cmd.wizard("renaming","'+sele+'")' ],
[ 1, 'clear' , 'cmd.select("'+sele+'","none")' ],
[ 1, 'delete selection', 'cmd.delete("'+sele+'")' ],
[ 1, 'copy to object','cmd.create(None,"'+sele+'",zoom=0)' ],
[ 1, 'extract object' ,'cmd.extract(None,"'+sele+'",zoom=0)' ],
[ 1, 'remove atoms' , 'cmd.remove("'+sele+'")' ],
]
return result
def pick_sele(self_cmd, sele, title):
result = [
[ 2, title, '' ],
[ 1, 'disable' , 'cmd.disable("'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'actions', sele_action2(self_cmd, sele) ],
[ 0, '' , '' ],
[ 1, 'color' , mol_color(self_cmd, sele) ],
[ 1, 'show' , mol_show(self_cmd, sele) ],
[ 1, 'hide' , mol_hide(self_cmd, sele) ],
[ 1, 'preset' , presets(self_cmd, sele) ],
[ 1, 'label' , mol_labels(self_cmd, sele) ],
[ 0, '' , '' ],
[ 1, 'zoom' ,'cmd.zoom("'+sele+'",animate=-1)' ],
[ 1, 'orient' ,'cmd.orient("'+sele+'",animate=-1)' ],
[ 1, 'center' ,'cmd.center("'+sele+'",animate=-1)' ],
[ 1, 'origin' ,'cmd.origin("'+sele+'")' ],
[ 0, '' ,'' ],
[ 1, 'drag' ,'cmd.drag("'+sele+'")' ],
[ 1, 'clean' ,'cmd.clean("'+sele+'")' ],
[ 0, '' ,'' ],
[ 1, 'remove' ,'cmd.remove("'+sele+'")' ],
]
return result
def pick_option(self_cmd, sele, title, object=0):
if object:
save_sele = sele
sele = "(byobj ("+sele+"))"
else:
save_sele = sele
result = [
[ 2, title, '' ],
[ 1, 'color' , mol_color(self_cmd, sele) ],
[ 1, 'show' , mol_show(self_cmd, sele) ],
[ 1, 'hide' , mol_hide(self_cmd, sele) ],
[ 1, 'preset' , presets(self_cmd, sele) ],
[ 1, 'label' , mol_labels(self_cmd, sele) ],
[ 0, '' , '' ],
[ 1, 'zoom' ,'cmd.zoom("'+sele+'",animate=-1)' ],
[ 1, 'orient' ,'cmd.orient("'+sele+'",animate=-1)' ],
[ 1, 'center' ,'cmd.center("'+sele+'",animate=-1)' ],
[ 1, 'origin' ,'cmd.origin("'+sele+'")' ],
[ 1, 'select' ,'cmd.select("'+sele+'",enable=1,merge=2)' ],
[ 0, '' ,'' ]]
if object:
result.append([ 1, 'drag' , [[ 1, 'coordinates', 'cmd.drag("'+sele+'")'],
[ 1, 'matrix', 'cmd.drag("'+save_sele+'",mode=1)']]])
else:
result.append([ 1, 'drag' , 'cmd.drag("'+sele+'")'])
result.extend([
[ 1, 'clean' ,'cmd.clean("'+sele+'")' ],
[ 1, 'masking' , masking(self_cmd, sele) ],
[ 1, 'movement' , movement(self_cmd, sele) ],
])
if object:
result.extend([
[ 1, 'delete' ,'cmd.delete("'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'disable' ,'cmd.disable("'+sele+'")' ],
])
else:
result.extend([
[ 1, 'remove atoms' , 'cmd.remove("'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'copy to object','cmd.create(None,"'+sele+'",zoom=0)' ],
[ 1, 'extract object' ,'cmd.extract(None,"'+sele+'",zoom=0)' ],
])
return result
def pick_option_rev(self_cmd, sele, title, object=0):
result = pick_option(self_cmd, sele, title, object)[1:]
result.reverse()
return result
def pick_menu(self_cmd, sele1, sele2):
if sele1[-1]=='`':
title = sele1[0:-1]
else:
title = sele1
return [[ 2, title , '' ],
[ 1, 'drag object matrix' ,'cmd.drag("(byobj ('+sele2+'))",mode=1)' ],
[ 1, 'drag object coords' ,'cmd.drag("(byobj ('+sele2+'))")' ],
[ 0, '' , '' ],
[ 1, 'atom' , pick_option(self_cmd, sele2, "Atom") ],
[ 1, 'residue' , pick_option(self_cmd, "(byres ("+sele2+"))", "Residue") ],
[ 1, 'chain' , pick_option(self_cmd, "(bychain ("+sele2+"))", "Chain") ],
[ 1, 'segment' , pick_option(self_cmd, "(byseg ("+sele2+"))", "Segment") ],
[ 1, 'object' , pick_option(self_cmd, sele2, "Object",1) ],
[ 0, '' , '' ],
[ 1, 'molecule', pick_option(self_cmd, "(bymol ("+sele2+"))", "Molecule") ],
[ 0, '' , '' ],
[ 1, 'fragment', pick_option(self_cmd, "(byfrag ("+sele2+"))", "Fragment") ],
[ 1, 'fragment+joint(s)', pick_option(self_cmd, "((byfrag ("+sele2+")) extend 1)", "Fragment") ],
]
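# Note (descriptive, added for clarity): byres/bychain/byseg/bymol/byfrag and
# byobj above are PyMOL selection operators that expand the picked atom(s) to
# the enclosing residue, chain, segment, molecule, fragment or object before
# the generic pick_option menu is built for that scope.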
def seq_menu(self_cmd, sele2, sele3): # obsolete/unused? (self_cmd added so the pick_option calls below resolve)
return [[ 2, 'Sequence' , '' ],
[ 1, 'selection', pick_option(self_cmd, sele3, '('+sele3+')') ],
[ 0, '' , '' ],
[ 1, 'residue' , pick_option(self_cmd, "(byres ("+sele2+"))", "Residue",) ],
[ 1, 'chain' , pick_option(self_cmd, "(bychain ("+sele2+"))", "Chain",) ],
[ 1, 'segment' , pick_option(self_cmd, "(byseg ("+sele2+"))", "Segment",) ],
[ 1, 'object' , pick_option(self_cmd, "(byobject ("+sele2+"))", "Object",1) ],
[ 0, '' , '' ],
[ 1, 'molecule', pick_option(self_cmd, "(bymol ("+sele2+"))", "Molecule") ],
[ 0, '' , '' ],
[ 1, 'C-alpha' , pick_option(self_cmd, "(bycalpha ("+sele2+"))", "C-alpha") ],
]
def seq_option(self_cmd, sele, title, object=0):
c=len(title)-1
while title[c]!='/':
c = c-1
title = title[0:c+1]
result = [
[ 2, title, '' ],
[ 1, 'color' , mol_color(self_cmd, sele) ],
[ 1, 'show' , mol_show(self_cmd, sele) ],
[ 1, 'hide' , mol_hide(self_cmd, sele) ],
[ 1, 'preset' , presets(self_cmd, sele) ],
[ 1, 'label' , mol_labels(self_cmd, sele) ],
[ 0, '' , '' ],
[ 1, 'zoom' ,'cmd.zoom("'+sele+'",animate=-1)' ],
[ 1, 'orient' ,'cmd.orient("'+sele+'",animate=-1)' ],
[ 1, 'center' ,'cmd.center("'+sele+'",animate=-1)' ],
[ 1, 'origin' ,'cmd.origin("'+sele+'")' ],
[ 1, 'select' ,'cmd.select("'+sele+'",enable=1,merge=2)' ],
[ 0, '' ,'' ],
[ 1, 'drag' ,'cmd.drag("'+sele+'")' ],
[ 1, 'clean' ,'cmd.clean("'+sele+'")' ],
]
if object:
result.extend([
[ 0, '' , '' ],
[ 1, 'disable' ,'cmd.disable("'+sele+'")' ],
[ 0, '' , '' ],
[ 1, 'delete' ,'cmd.delete("'+sele+'")' ]
])
else:
result.extend([
[ 0, '' , '' ],
[ 1, 'create object','cmd.create(None,"'+sele+'",zoom=0)' ],
[ 1, 'extract object' ,'cmd.extract(None,"'+sele+'",zoom=0)' ],
[ 0, '' , '' ],
[ 1, 'remove atoms' , 'cmd.remove("'+sele+'")' ],
])
return result
def scene_menu(self_cmd, name):
safe_name = name.replace('"','\\"') # just in case
return [[ 2, 'Scene '+name , '' ],
[ 1, 'rename', 'cmd.wizard("renaming","'+name+'",mode="scene")' ],
[ 0, '' , '' ],
[ 1, 'update', 'cmd.scene("'+safe_name+'","update")'],
[ 0, '' , '' ],
[ 1, 'delete', 'cmd.scene("'+safe_name+'","delete")'],
]
| 56.434234 | 168 | 0.375573 |
94e26c07af05ecd3333606dafba9b1a75a2fe459 | 14,999 | py | Python | experimenter/evaluation.py | Forethought-Technologies/experimenter | d3775b405ca1fa06f24478996a4330d43f1e954e | ["MIT"] | null | null | null | experimenter/evaluation.py | Forethought-Technologies/experimenter | d3775b405ca1fa06f24478996a4330d43f1e954e | ["MIT"] | null | null | null | experimenter/evaluation.py | Forethought-Technologies/experimenter | d3775b405ca1fa06f24478996a4330d43f1e954e | ["MIT"] | 1 | 2021-04-23T21:52:31.000Z | 2021-04-23T21:52:31.000Z |
import logging
import numpy as np
import torch
from experimenter.utils import utils as U
class Accuracy:
def __call__(self, prediction: torch.Tensor, label: torch.Tensor):
"""Get accuracy of a multi-class classification for a batch
Args:
prediction: Batch of shape (batch_size, num_classes)
label: Batch true labels of shape (batch_size)
Returns:
            score: scalar accuracy for this batch
"""
# assert isinstance(prediction, torch.Tensor)
# assert isinstance(label, torch.Tensor)
logging.debug(f"prediction: {prediction}")
logging.debug(f"true label: {label}")
        corrects = 0
        total = 0
        for pred, true_label in zip(prediction, label):
            corrects += int(pred == true_label)
            total += 1
        val_score = corrects / total
logging.debug(f"Val_score: {val_score}")
return val_score
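# Usage sketch (illustrative, not part of the original module): with predicted
# class indices and true labels of shape (batch_size,),
#
#     acc = Accuracy()
#     score = acc(torch.tensor([1, 0, 2]), torch.tensor([1, 1, 2]))  # -> 2/3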
class Dummy:
def __call__(self, prediction: torch.Tensor, label: torch.Tensor):
"""A dummy class that returns 0 to be used with multi-task outputs that are not of interest
Args:
prediction: Batch of shape (batch_size, num_classes)
label: Batch true labels of shape (batch_size)
Returns:
score: zero
"""
return 0
class ListEvaluator:
def __init__(self, config):
self.logger = logging.getLogger(self.__class__.__name__)
loss_f = config["evaluator"]["params"]["loss_f"]
metrics_f = config["evaluator"]["params"]["metrics_f"]
self.device = config["device"]
self.loss_f = []
for f in loss_f:
U.evaluate_params(f["params"], locals())
self.loss_f.append(
U.load_class(
f["module"], f["class"], f["params"], pass_params_as_dict=True
)
)
self.metrics_f = []
for f in metrics_f:
U.evaluate_params(f["params"], locals())
self.metrics_f.append(
U.load_class(
f["module"], f["class"], f["params"], pass_params_as_dict=True
)
)
self.reset()
def update_batch(self, data):
# floss = 0
res = self.get_initial_loss()
for k, f in enumerate(self.metrics_f):
res[k] += f(data["pred"][k], data["label"][k])
self.num_items += data["inp"][0].shape[
0
] # Add one batch count #data['inp'][0].shape[0]
loss = [
current_metric + (metric * data["inp"][0].shape[0])
for current_metric, metric in zip(self.current_metrics, res)
]
self.current_metrics = loss
return [metric / self.num_items for metric in self.current_metrics]
def get_metrics(self, data):
# Iterate through all outputs / losses
res = []
for k, f in enumerate(self.metrics_f):
# # for each loss, multiply with the mask
# # Do proper casting based on f_func instance type
res.append(f(data["pred"][k], data["label"][k]))
# floss += tmp_loss * b[2][k]
#
# val_loss = (val_loss * self.num_items) + (floss)
# self.num_items += data[0][0].shape[0]
# val_loss /= self.num_items
return res
# Get score
def update_batch_loss(self, data, aggregate="mean"):
"""Mimics the update_batch but operates on the loss function not the metric function.
Logis is same as call but maintains a global state across batches.
Assumes the loss function either has shape [batch, probs by class
or real value for regression] for loss per example or [batch,seq_len, probs]
for when prediction is for a sequence (lm / conversation / multi-class)
in which case it sums over dimention 1."""
floss = [0] * len(self.loss_f) # loss across all losses in batch
batch_items = [0] * len(self.loss_f) # total tokens in batch
# Iterate through all outputs / losses
if aggregate not in ("mean"):
raise AttributeError(
"Expecting aggregate attribute to be mean or sum, got {}".format(
aggregate
)
)
self.logger.debug("Evaluating a batch ==================")
if aggregate == "mean":
for k, f in enumerate(self.loss_f):
# for each loss, multiply with the mask
# Do proper casting based on f_func instance type
tmp_loss = self._applyloss(f, data["out"][k], data["label"][k])
self.logger.debug(f"Evaluator tmp loss{k}: {tmp_loss}")
# We consider each example to count as 1 (class case).
# Will override if it's a sequence
b_size = data["label"][k].shape[0]
self.logger.debug(f"Batch size: {b_size}")
if tmp_loss.dim() > 1:
# label is a sequence of labels not a single label
tmp_loss = tmp_loss.sum(
dim=1
) # sum over the sequence length resulting in [batch_num,]
tmp_loss = tmp_loss * data["mask"][k]
num_items = (
(data["label"][k] > 0)
.type(torch.DoubleTensor)
.to(self.device)
.sum(dim=1)
) # Override num_items to be actual tokens TODO: replace 0 with ignore index
self.logger.debug(
f"Number of tokens in all sequences in batch: {num_items}"
)
                # weight each example by its total tokens. Shape: [batch_size,]
# tmp_loss = tmp_loss * num_items
num_items = (num_items * (data["mask"][k] > 0)).sum()
self.logger.debug(
f"Number of tokens after multiplying with mask: {num_items}"
)
else:
tmp_loss = tmp_loss * data["mask"][k]
num_items = (
torch.tensor(1).type(torch.DoubleTensor).to(self.device)
) # Assume each example is 1. will be broadcasted across batch_size
self.logger.debug(
f"Number of tokens in all sequences in batch: {num_items}"
)
num_items = (num_items * (data["mask"][k] > 0)).sum()
self.logger.debug(
f"Number of tokens after multiplying with mask: {num_items}"
)
assert num_items == (data["mask"][k] > 0).sum()
# tmp_loss = tmp_loss * num_items
# tmp_loss = (tmp_loss * data['mask'][k])
# num_items = (num_items * (data['mask'][k] > 0)).sum()
# self.logger.debug(f"Number of tokens after multiplying with mask: {num_items}")
num_items = num_items.data.cpu().numpy()
if num_items == 0:
tmp_loss = 0
else:
tmp_loss = tmp_loss.sum().data.cpu().numpy() # / num_items
# tmp_loss /= b_size
floss[k] += tmp_loss # mean should be updated to sum / none and other
batch_items[k] += num_items
self.logger.debug("Evaluator sum loss across losses: {}".format(floss))
self.logger.debug(
"Evaluator Batch total items across losses: {}".format(batch_items)
)
self.num_items_loss[k] += batch_items[
k
] # Add one batch count #data['inp'][0].shape[0]
self.logger.debug(
"Evaluator total ruunning sum of items across batches: {}".format(
self.num_items_loss
)
)
# need to calculate sum weighted by total items in batch
self.current_loss[k] = self.current_loss[k] + (floss[k])
# Sum of all loss across batches
# self.current_loss = loss
# Return average loss to this point
return [f / i for f, i in zip(self.current_loss, self.num_items_loss)]
def __call__(self, data, aggregate="mean"):
"""Called during training step to get a single loss value and backpropagate"""
floss = 0
batch_items = 0
# Iterate through all outputs / losses
if aggregate not in ("mean"):
raise AttributeError(
"Expecting aggregate attribute to be mean or sum, got {}".format(
aggregate
)
)
self.logger.debug(
"Inside calling method. calculating loss ======================== "
)
for k, f in enumerate(self.loss_f):
# for each loss, multiply with the mask
# Do proper casting based on f_func instance type
# self.logger.debug(f"Evaluator Whole batch: {data}")
tmp_loss = self._applyloss(f, data["out"][k], data["label"][k])
self.logger.debug(f"Evaluator tmp loss{k}: {tmp_loss}")
# We consider each example to count as 1 (class case).
# Will override if it's a sequence
b_size = data["label"][k].shape[0]
self.logger.debug(f"Batch size: {b_size}")
if tmp_loss.dim() > 1:
# label is a sequence of labels not a single label
tmp_loss = tmp_loss.sum(
dim=1
) # sum over the sequence length resulting in [batch_num,]
tmp_loss = tmp_loss * data["mask"][k]
num_items = (
(data["label"][k] > 0)
.type(torch.DoubleTensor)
.to(self.device)
.sum(dim=1)
) # Override num_items to be actual tokens TODO: replace 0 with ignore index
self.logger.debug(
f"Number of tokens in all sequences in batch: {num_items}"
)
                # weight each example by its total tokens. Shape: [batch_size,]
# tmp_loss = tmp_loss * num_items
num_items = (num_items * (data["mask"][k] > 0)).sum()
self.logger.debug(
f"Number of tokens after multiplying with mask: {num_items}"
)
else:
tmp_loss = tmp_loss * data["mask"][k]
num_items = (
torch.tensor(1).type(torch.DoubleTensor).to(self.device)
) # Assume each example is 1. will be broadcasted across batch_size
self.logger.debug(
f"Number of tokens in all sequences in batch: {num_items}"
)
num_items = (num_items * (data["mask"][k] > 0)).sum()
self.logger.debug(
f"Number of tokens after multiplying with mask: {num_items}"
)
assert num_items == (data["mask"][k] > 0).sum()
# tmp_loss = tmp_loss * num_items
# tmp_loss = (tmp_loss * data['mask'][k])
# num_items = (num_items * (data['mask'][k] > 0)).sum()
# self.logger.debug(f"Number of tokens after multiplying with mask: {num_items}")
# num_items = num_items
tmp_loss = tmp_loss.sum() # / num_items
floss += tmp_loss # mean should be updated to sum / none and other
batch_items += num_items
return floss / batch_items
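        # NOTE: everything below this return statement is unreachable legacy
        # code retained from an earlier revision of this method.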
for k, f in enumerate(self.loss_f):
# for each loss, multiply with the mask
# Do proper casting based on f_func instance type
tmp_loss = self._applyloss(f, data["out"][k], data["label"][k])
self.logger.debug(f"Evaluator - labels[{k}]: {data['label'][k]}")
self.logger.debug(f"Evaluator - output[{k}]: {data['out'][k]}")
# tmp_loss = f(preds[0][k], y[k].squeeze().type(torch.LongTensor))
self.logger.debug(f"Evaluator tmp loss{k}: {tmp_loss}")
# We consider each example to count as 1 (class case). Will override if it's a sequence
num_items = data["label"][k].shape[0]
if tmp_loss.dim() > 1:
tmp_loss = tmp_loss.sum(
dim=1
) # sum over the sequence length resulting in [batch_num,]
if aggregate == "mean":
num_items = (
(data["label"][k] > 0).type(torch.DoubleTensor).sum(dim=1)
)
tmp_loss = tmp_loss * num_items
self.logger.debug(f"Evaluator tmp loss {k} after summation: {tmp_loss}")
tmp_loss = tmp_loss * data["mask"][k]
if aggregate == "mean":
num_items = (num_items * (data["mask"][k] > 0)).sum()
self.logger.debug(f"Number of items after masking: {num_items}")
if num_items == 0:
tmp_loss = 0
else:
tmp_loss = tmp_loss.sum() / num_items
if aggregate == "sum":
tmp_loss = tmp_loss.sum()
floss += tmp_loss # mean should be updated to sum / none and other
self.logger.debug("Evaluator loss: {}".format(floss))
return floss
def _applyloss(self, f, output, label):
"""Calls the loss function with no aggregation.
Should return either [batch_size,] for class or [batch_size, seq_len]
for sequence classes"""
        if isinstance(f, torch.nn.CrossEntropyLoss):
            if self.device == "cuda":
                tmp_loss = f(output, label.squeeze().type(torch.cuda.LongTensor))
            else:
                tmp_loss = f(output, label.squeeze().type(torch.LongTensor))
        else:
            # Fall back to calling the loss directly so tmp_loss is always bound.
            tmp_loss = f(output, label)
        return tmp_loss
def reset(self):
# Initialization for metrics
self.num_items = 0
self.current_metrics = self.get_initial_loss()
# Initialization for losses
self.num_items_loss = [0] * len(self.loss_f)
self.current_loss = [0] * len(self.loss_f)
def get_initial_loss(self):
res = [0] * len(self.metrics_f)
return res
def get_worst_metric(self):
return [0] * len(self.metrics_f)
def get_worst_loss(self):
return [np.inf] * len(self.metrics_f)
def isbetter(self, a, b, is_metric=True):
"""If is metric, we're assuming higher is better
(think accuracy), else, it's a loss and lower is better"""
# Bad implementation, find a way to compare other metrics
if is_metric:
return np.all(a > b)
else:
return np.all(a < b)
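# Minimal usage sketch (illustrative; "config" and "batch" are hypothetical
# and must follow the shapes read in __init__ and update_batch_loss):
#
#     evaluator = ListEvaluator(config)              # loss_f / metrics_f from config
#     loss = evaluator(batch)                        # scalar loss for backprop
#     running = evaluator.update_batch_loss(batch)   # running mean per loss
#     evaluator.reset()                              # clear state between epochs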
| 41.093151 | 99 | 0.525502 |
e879737d5c8b5e435e33afc5d6b7e6df34a643ec | 3,773 | py | Python | utilities/bin_driams.py | BorgwardtLab/maldi-learn | 8333b6facde7325e36adb2d065ccb6d4f841b4e3 | ["BSD-3-Clause"] | 11 | 2020-07-30T15:33:31.000Z | 2022-03-18T08:26:20.000Z | utilities/bin_driams.py | BorgwardtLab/maldi-learn | 8333b6facde7325e36adb2d065ccb6d4f841b4e3 | ["BSD-3-Clause"] | 1 | 2021-01-18T04:55:14.000Z | 2021-01-19T10:18:34.000Z | utilities/bin_driams.py | BorgwardtLab/maldi-learn | 8333b6facde7325e36adb2d065ccb6d4f841b4e3 | ["BSD-3-Clause"] | 1 | 2021-09-07T04:27:32.000Z | 2021-09-07T04:27:32.000Z |
"""Bin the DRIAMS data set and save it.
The purpose of this script is to perform binning of the DRIAMS data set
and store the resulting spectra as preprocessed files. This speeds up a
large number of downstream classification tasks.
"""
import argparse
import dotenv
import os
import pandas as pd
from maldi_learn.driams import DRIAMSDatasetExplorer
from maldi_learn.driams import DRIAMSLabelEncoder
from maldi_learn.driams import load_driams_dataset
from maldi_learn.vectorization import BinningVectorizer
from tqdm import tqdm
dotenv.load_dotenv()
DRIAMS_ROOT = os.getenv('DRIAMS_ROOT')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'-s', '--site',
default='DRIAMS-A',
type=str,
help='Site to pre-process')
parser.add_argument(
'-y', '--years',
default=['2015', '2016', '2017', '2018'],
type=str,
nargs='+',
help='Years to pre-process'
)
parser.add_argument(
'-b', '--bins',
type=int,
required=True,
help='Number of bins to use for binning transformation'
)
args = parser.parse_args()
# Get all available antibiotics for the selected site. We will
# pre-process *all* the spectra.
explorer = DRIAMSDatasetExplorer(DRIAMS_ROOT)
antibiotics = explorer.available_antibiotics(args.site)
# Process each year separately, because that simplifies assigning
# the output files.
for year in tqdm(args.years, desc='Year'):
driams_dataset = load_driams_dataset(
explorer.root,
args.site,
year,
'*', # Load all species; we do *not* want to filter anything
antibiotics[year],
handle_missing_resistance_measurements='keep', # Keep all
)
# Follows the same hierarchy as the other data sets. For
# example, if site DRIAMS-A is being pre-processed, each
# file will be stored in
#
# $ROOT/DRIAMS-A/binned_$BINS/$YEAR
#
# for $BINS bins in the histogram. This makes re-loading
# pre-processed spectra ridiculously easy.
output_directory = os.path.join(
explorer.root,
args.site,
f'binned_{args.bins}',
year
)
os.makedirs(output_directory, exist_ok=True)
bv = BinningVectorizer(
args.bins,
min_bin=2000,
max_bin=20000,
n_jobs=-1 # Use all available cores to perform the processing
)
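        # Illustrative note: the vectorizer histograms each spectrum's
        # (m/z, intensity) pairs into `args.bins` equal-width bins spanning
        # 2000-20000 Da, so bv.fit_transform([spectrum])[0] below yields a
        # single intensity vector of length args.bins.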
codes = driams_dataset.y['code'].values
for spectrum, code in tqdm(zip(driams_dataset.X, codes),
total=len(codes),
desc='Spectrum'):
output_file = os.path.join(
output_directory,
f'{code}.txt'
)
# Might change this behaviour in the future, but for now,
# let's play it safe and not overwrite anything.
if os.path.exists(output_file):
continue
# This has the added advantage that we now *see* whenever
# a new spectrum is being stored.
tqdm.write(code)
X = bv.fit_transform([spectrum])[0]
# Turn the spectrum vector into a data frame that tries to
# at least partially maintain a description. This also has
# the advantage of automatically generating an index.
df = pd.DataFrame({'binned_intensity': X})
df.index.name = 'bin_index'
# Use a proper separator to be compatible with our reader.
df.to_csv(output_file, sep=' ')
| 30.674797 | 78 | 0.594487 |
34a539c28ec1c5eb50cf43b974a8061039292a86 | 20,483 | py | Python | src/katsdpmetawriter/__init__.py | ska-sa/katsdpmetawriter | 80ef5ea910882ea25c1f2ad06a74102f9fb246b8 | ["BSD-3-Clause"] | null | null | null | src/katsdpmetawriter/__init__.py | ska-sa/katsdpmetawriter | 80ef5ea910882ea25c1f2ad06a74102f9fb246b8 | ["BSD-3-Clause"] | 3 | 2020-06-09T20:08:08.000Z | 2021-11-08T14:24:24.000Z | src/katsdpmetawriter/__init__.py | ska-sa/katsdpmetawriter | 80ef5ea910882ea25c1f2ad06a74102f9fb246b8 | ["BSD-3-Clause"] | null | null | null |
################################################################################
# Copyright (c) 2018-2020, National Research Foundation (SARAO)
#
# Licensed under the BSD 3-Clause License (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy
# of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
"""Serialise a view of the Telescope State for the current observation to long term storage.
The Telescope State (TS) is a meta-data repository that includes information about the
current state of the wider telescope, configuration data and intermediate SDP products
such as calibration solutions. It also contains references to the data objects that
comprise the visibility data captured for an observation.
The MeerKAT data access library (katdal) uses the TS effectively as the 'file'
representation of an observation. Such a 'file' can be opened by pointing katdal to either
a live TS repository (currently Redis-backed) or to a serialised representation
of the TS (currently supports Redis RDB format).
This writer, when requested, can create two different views of the TS and save these into
long-term storage.
The first view is a lightweight representation of the TS containing the basic data
to allow katdal to open an observation. This includes data such as captured timestamps,
storage configuration information and observation targets. Using the supplied capture block
ID, an attempt is also made to only record meta information specific to that capture
block ID.
The second is a complete dump of the entire TS. This may contain meta-data from
other capture sessions.
"""
import logging
import os
import socket
import asyncio
import time
import enum
import pathlib
from collections import deque
# async_generator provides a backport of Python 3.7's asynccontextmanager
from async_generator import asynccontextmanager
import aiobotocore.config
import aiobotocore.session
import botocore.exceptions
import katsdptelstate
from katsdptelstate.aio.rdb_writer import RDBWriter
from aiokatcp import DeviceServer, Sensor, FailReply
# BEGIN VERSION CHECK
# Get package version when locally imported from repo or via -e develop install
try:
import katversion as _katversion
except ImportError:
import time as _time
__version__ = "0.0+unknown.{}".format(_time.strftime('%Y%m%d%H%M'))
else:
__version__ = _katversion.get_version(__path__[0]) # type: ignore
# END VERSION CHECK
logger = logging.getLogger(__name__)
# Fairly arbitrary limit on number of concurrent meta data writes
# that we allow. Tradeoff between not stopping observations and
# taking too long to discover some blocking fault.
MAX_ASYNC_TASKS = 10
# Template of key names that we would like to preserve when dumping
# a lite version of Telstate. Since we always back observations with
# a full dump of Telstate, we don't fail on missing entries, but do
# log them.
# Each string entry is format'ed with the following substitutions:
#
# {cb}: The capture block ID which uniquely identifies this data capture
# {sn}: The name of a specific stream in the capture (e.g. sdp_l0)
# m???: A special that represents a glob-style pattern match used to
# do wildcard MeerKAT antenna name matching for certain keys.
# s????: A special that represents a glob-style pattern match used to
# do wildcard SKA prototype antenna name matching for certain keys.
#
LITE_KEYS = [
"{sn}_int_time",
"{sn}_sync_time",
"{sn}_bls_ordering",
"{sn}_n_chans",
"{sn}_bandwidth",
"{sn}_center_freq",
"{sn}_s3_endpoint_url",
"{sn}_stream_type",
"{sn}_need_weights_power_scale",
"{cb}_obs_params",
"{cb}_obs_script_log",
"{cb}_obs_label",
"{cb}_obs_activity",
"{cb}_{sn}_chunk_info",
"{cb}_{sn}_first_timestamp",
"sub_pool_resources",
"sub_band",
"sub_product",
"m???_observer",
"m???_activity",
"m???_target",
"s????_observer",
"s????_activity",
"s????_target",
"cbf_target",
"sdp_config"
]
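# For example (illustrative values only): with cb="1234567890" and
# sn="sdp_l0", the template "{cb}_{sn}_chunk_info" expands to
# "1234567890_sdp_l0_chunk_info", while patterns such as "m???_target" are
# passed to telstate as glob-style filters matching keys like "m012_target".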
def timer():
"""Get timestamp for measuring elapsed time.
This is a wrapper that's made so that it can be mocked easily.
"""
return time.monotonic()
def make_botocore_dict(s3_args):
"""Create a dict suitable for passing into aiobotocore using the supplied args."""
return {
"aws_access_key_id": s3_args.access_key,
"aws_secret_access_key": s3_args.secret_key,
"endpoint_url": f"http://{s3_args.s3_host}:{s3_args.s3_port}",
"use_ssl": False,
"config": aiobotocore.config.AioConfig(s3={"addressing_style": "path"})
}
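# Usage sketch (hypothetical values): any object exposing access_key,
# secret_key, s3_host and s3_port attributes will do, e.g.
#
#     s3_args = argparse.Namespace(access_key="ak", secret_key="sk",
#                                  s3_host="127.0.0.1", s3_port=7480)
#     botocore_dict = make_botocore_dict(s3_args)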
async def get_lite_keys(telstate, capture_block_id, stream_name):
"""Uses capture_block_id and stream_name, along with the template
of keys to store in the lite dump, to build a full list of the keys
to be dumped.
Note: We avoid using telstate views here since we want to write fully
qualified keys into the lite database to easily allow merge later on.
The philosophy of the lite dump is to change as little as possible.
"""
keys = []
for key in LITE_KEYS:
if key.find('?') >= 0:
keys.extend(await telstate.keys(filter=key))
else:
keys.append(key.format(cb=capture_block_id, sn=stream_name))
return keys
@asynccontextmanager
async def get_s3_connection(botocore_dict, fail_on_boto=False):
"""Test the connection to S3 as described in the args.
Return the connection object.
In general we are more concerned with informing the user why the
connection failed, rather than raising exceptions. Users should always
check the return value and make appropriate decisions.
If set, fail_on_boto will not suppress boto exceptions. Used when verifying
credentials.
Returns
-------
s3_conn : S3Connection
A connection to the s3 endpoint. None if a connection error occurred.
"""
session = aiobotocore.session.get_session()
try:
# reliable way to test connection and access keys
async with session.create_client('s3', **botocore_dict) as s3_conn:
await s3_conn.list_buckets()
yield s3_conn
return
except socket.error as e:
logger.error(
"Failed to connect to S3 host %s. Please check network and host address. (%s)",
botocore_dict['endpoint_url'], e)
except botocore.exceptions.ClientError as e:
error_code = e.response.get('Error', {}).get('Code')
if error_code == 'InvalidAccessKeyId':
logger.error(
"Supplied access key %s is not a valid S3 user.",
botocore_dict['aws_access_key_id'])
elif error_code == 'SignatureDoesNotMatch':
logger.error("Supplied secret key is not valid for specified user.")
elif error_code == 'AccessDenied':
logger.error(
"Supplied access key (%s) has no permissions on this server.",
botocore_dict['aws_access_key_id'])
else:
logger.error(e)
if fail_on_boto:
raise
yield None
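# Usage sketch (illustrative): callers must handle the None case, e.g.
#
#     async with get_s3_connection(botocore_dict) as conn:
#         if conn is not None:
#             await conn.list_buckets()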
async def _write_rdb(ctx, telstate, dump_filename, capture_block_id, stream_name,
                     botocore_dict, key_name, lite=True):
    """Create an on-disk RDB dump of the telescope state.
    If a `botocore_dict` is supplied, upload the dump to S3 under the name
    `key_name`.
    """
keys = None
if lite:
keys = await get_lite_keys(telstate, capture_block_id, stream_name)
logger.info(
"Writing %s keys to local RDB dump %s",
str(len(keys)) if lite else "all", dump_filename)
supplemental_telstate = katsdptelstate.aio.TelescopeState()
await supplemental_telstate.set('stream_name', stream_name)
await supplemental_telstate.set('capture_block_id', capture_block_id)
with RDBWriter(dump_filename) as rdbw:
await rdbw.save(telstate, keys)
if rdbw.keys_written > 0:
await rdbw.save(supplemental_telstate)
key_errors = rdbw.keys_failed
if not rdbw.keys_written:
logger.error("No valid telstate keys found for %s_%s", capture_block_id, stream_name)
return (None, key_errors)
logger.info("Write complete. %s errors", key_errors)
ctx.inform(
"RDB extract and write for {}_{} complete. {} errors"
.format(capture_block_id, stream_name, key_errors))
if not botocore_dict:
return (None, key_errors)
async with get_s3_connection(botocore_dict) as s3_conn:
if not s3_conn:
logger.error("Unable to store RDB dump in S3.")
return (None, key_errors)
file_size = os.path.getsize(dump_filename)
rate_bytes = 0
written_bytes = 0
try:
await s3_conn.create_bucket(Bucket=capture_block_id)
st = timer()
with open(dump_filename, 'rb') as dump_data:
await s3_conn.put_object(
Bucket=capture_block_id,
Key=key_name,
Body=dump_data)
written_bytes = dump_data.tell()
rate_bytes = written_bytes / (timer() - st)
except botocore.exceptions.ClientError as e:
status = e.response.get('ResponseMetadata', {}).get('HTTPStatusCode')
if status in {403, 409}:
logger.error(
"Unable to store RDB dump as access key %s "
"does not have permission to write to bucket %s",
botocore_dict["aws_access_key_id"], capture_block_id)
return (None, key_errors)
elif status == 404:
logger.error(
"Unable to store RDB dump as the bucket %s or key %s has been lost.",
capture_block_id, key_name)
return (None, key_errors)
else:
logger.error(
"Error writing to %s/%s in S3",
capture_block_id, key_name, exc_info=True)
return (None, key_errors)
if written_bytes != file_size:
logger.error(
"Incorrect number of bytes written (%d/%d) when writing RDB dump %s",
written_bytes, file_size, dump_filename)
return (None, key_errors)
return (rate_bytes, key_errors)
class DeviceStatus(enum.Enum):
IDLE = 1
QUEUED = 2
class MetaWriterServer(DeviceServer):
VERSION = "sdp-meta-writer-0.1"
BUILD_STATE = "katsdpmetawriter-" + __version__
def __init__(self, host, port, botocore_dict, rdb_path, telstate):
self._botocore_dict = botocore_dict
self._async_tasks = deque()
self._rdb_path = rdb_path
self._telstate = telstate
self._build_state_sensor = Sensor(str, "build-state", "SDP Controller build state.")
self._device_status_sensor = Sensor(
DeviceStatus, "status", "The current status of the meta writer process")
self._last_write_stream_sensor = Sensor(
str, "last-write-stream", "The stream name of the last meta data dump.")
self._last_write_cbid_sensor = Sensor(
str, "last-write-cbid", "The capture block ID of the last meta data dump.")
self._key_failures_sensor = Sensor(
int, "key-failures",
"Count of the number of failures to write a desired key to the RDB dump. "
"(prometheus: counter)")
self._last_transfer_rate = Sensor(
float, "last-transfer-rate",
"Rate of last data transfer to S3 endpoint in Bps. (prometheus: gauge)")
self._last_dump_duration = Sensor(
float, "last-dump-duration",
"Time taken to write the last dump to disk. (prometheus: gauge)", "s")
super().__init__(host, port)
self._build_state_sensor.set_value(self.BUILD_STATE)
self.sensors.add(self._build_state_sensor)
self._device_status_sensor.set_value(DeviceStatus.IDLE)
self.sensors.add(self._device_status_sensor)
self.sensors.add(self._last_write_stream_sensor)
self.sensors.add(self._last_write_cbid_sensor)
self.sensors.add(self._last_transfer_rate)
self.sensors.add(self._last_dump_duration)
self._key_failures_sensor.set_value(0)
self.sensors.add(self._key_failures_sensor)
def _fail_if_busy(self):
"""Raise a FailReply if there are too many asynchronous operations in progress."""
busy_tasks = 0
for task in self._async_tasks:
if not task.done():
busy_tasks += 1
if busy_tasks >= MAX_ASYNC_TASKS:
raise FailReply(
('Meta-data writer has too many operations in progress (max {}). '
'Please wait for one to complete first.').format(MAX_ASYNC_TASKS))
def _clear_async_task(self, future):
"""Clear the specified async task.
Parameters
----------
future : :class:`asyncio.Future`
            The task to remove from :attr:`_async_tasks`.
"""
        try:
            self._async_tasks.remove(future)
        except ValueError:
            pass
if not self._async_tasks:
self._device_status_sensor.set_value(DeviceStatus.IDLE)
async def _write_meta(self, ctx, capture_block_id, stream_name, lite=True):
"""Write meta-data extracted from the current telstate object
to a binary dump and place this in the currently connected
S3 bucket for storage.
"""
additional_name = "full." if not lite else ""
dump_folder = os.path.join(self._rdb_path, capture_block_id)
os.makedirs(dump_folder, exist_ok=True)
basename = "{}_{}.{}rdb".format(capture_block_id, stream_name, additional_name)
dump_filename = os.path.join(dump_folder, basename + '.uploading')
st = timer()
# Generate local RDB dump and write into S3 - note that
# capture_block_id is used as the bucket name for storing meta-data
# regardless of the stream selected.
# The full capture_block_stream_name is used as the bucket for payload
# data for the particular stream.
(rate_b, key_errors) = await _write_rdb(
ctx, self._telstate, dump_filename,
capture_block_id, stream_name, self._botocore_dict, basename, lite)
et = timer()
sensor_timestamp = time.time()
self._last_write_stream_sensor.set_value(stream_name, timestamp=sensor_timestamp)
self._last_write_cbid_sensor.set_value(capture_block_id, timestamp=sensor_timestamp)
self._last_dump_duration.set_value(et - st, timestamp=sensor_timestamp)
if key_errors > 0:
self._key_failures_sensor.set_value(
self._key_failures_sensor.value + key_errors,
Sensor.Status.ERROR)
if not rate_b:
try:
trawler_filename = os.path.join(dump_folder, basename)
# Prepare to rename file so that the trawler process can
# attempt the S3 upload at a later date.
os.rename(dump_filename, trawler_filename)
except FileNotFoundError:
msg = (
"Failed to store RDB dump, and couldn't find file to rename. "
"This error cannot be recovered from."
)
logger.error(msg)
raise FailReply(msg)
else:
logger.info(
"RDB file written to bucket %s with key %s",
capture_block_id, os.path.basename(dump_filename))
try:
os.remove(dump_filename)
except Exception as e:
# it won't interfere with the trawler so we just continue
logger.warning("Failed to remove transferred RDB file %s. (%s)", dump_filename, e)
return rate_b
async def write_meta(self, ctx, capture_block_id, streams, lite=True):
"""Implementation of request_write_meta."""
rate_per_stream = {}
for stream in streams:
task = asyncio.ensure_future(
self._write_meta(ctx, capture_block_id, stream, lite))
self._device_status_sensor.set_value(DeviceStatus.QUEUED)
# we risk queue depth expansion at this point, but we are really
# only checking to prevent outrageous failures.
self._async_tasks.append(task)
try:
rate_b = await task
finally:
self._clear_async_task(task)
rate_per_stream[stream] = rate_b
dump_folder = os.path.join(self._rdb_path, capture_block_id)
if not lite and os.path.exists(dump_folder):
# We treat writing the streams for a full meta dump as the
# completion of meta data for that particular capture block id
# (assuming at least one stream was written).
touch_file = os.path.join(dump_folder, "complete")
pathlib.Path(touch_file).touch(exist_ok=True)
return rate_per_stream
async def request_write_meta(
self, ctx, capture_block_id: str, lite: bool = True, stream_name: str = None) -> None:
"""Write a dump of a subset of currently active telescope state to disk and
optionally archive it to the preconfigured S3 endpoint. The precise subset
is controlled through the selection of capture_block_id, stream_name and
the lite boolean.
Method may take some time so is run asynchronously.
Parameters
----------
capture_block_id : string
The capture block id generated by master controller to identify a
specific data capture. Typically this will be an integer representing the
start time of the observation in epoch seconds (+/- to allow for
uniqueness if required).
lite : bool, optional
If True then a very limited subset of telstate keys are written to the dump,
otherwise a 'full' dump is produced. Currently 'full' is the entire telescope
state database, but in the future may be restricted to meta-data relevant
only to the chosen capture_block_id and stream_name.
stream_name : string, optional
The specific stream name to use in extracting stream specific meta-data.
(e.g. sdp_l0) If no stream is specified, all sdp.vis streams with
attached writers will be saved individually.
"""
self._fail_if_busy()
if not stream_name:
streams = await self._telstate.get('sdp_archived_streams')
if not streams:
raise FailReply(
"No stream specified, and cannot determine available streams from telstate.")
streams = [stream for stream in streams
if await self._telstate.view(stream).get('stream_type') == 'sdp.vis']
else:
streams = [stream_name]
ctx.inform(
("Starting write of {} metadata for CB: {} and Streams: {} to S3. "
"This may take a minute or two...")
.format("lightweight" if lite else "full", capture_block_id, streams))
rate_per_stream = await self.write_meta(ctx, capture_block_id, streams, lite)
peak_rate = 0
dump_type_name = "Lightweight" if lite else "Full dump"
for stream, rate_b in rate_per_stream.items():
if not rate_b:
ctx.inform(
"{} meta-data for CB: {}_{} written to local disk only"
.format(dump_type_name, capture_block_id, stream))
else:
ctx.inform(
"{} meta-data for CB: {}_{} written to S3 @ {:.2f}MBps"
.format(dump_type_name, capture_block_id, stream, rate_b / 1e6))
peak_rate = max(peak_rate, rate_b)
if peak_rate > 0:
self._last_transfer_rate.set_value(peak_rate)
| 41.887526 | 98 | 0.645169 |
0dd0b0e6dc3177a9d89a9cea82157e4d92e5abb2 | 1,502 | py | Python | Tests/interop/net/loadorder/t3a.py | btddg28/ironpython | 8006238c19d08db5db9bada39d765143e631059e | ["Apache-2.0"] | 2 | 2019-09-21T22:22:30.000Z | 2020-05-09T12:45:51.000Z | Tests/interop/net/loadorder/t3a.py | btddg28/ironpython | 8006238c19d08db5db9bada39d765143e631059e | ["Apache-2.0"] | null | null | null | Tests/interop/net/loadorder/t3a.py | btddg28/ironpython | 8006238c19d08db5db9bada39d765143e631059e | ["Apache-2.0"] | 1 | 2019-09-18T05:37:46.000Z | 2019-09-18T05:37:46.000Z |
#####################################################################################
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# This source code is subject to terms and conditions of the Apache License, Version 2.0. A
# copy of the license can be found in the License.html file at the root of this distribution. If
# you cannot locate the Apache License, Version 2.0, please send an email to
# ironpy@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
# by the terms of the Apache License, Version 2.0.
#
# You must not remove this notice, or any other, from this software.
#
#
#####################################################################################
from iptest.assert_util import *
add_clr_assemblies("loadorder_3")
# namespace First {
# public class Generic1<K, V> {
# public static string Flag = typeof(Generic1<,>).FullName;
# }
# }
import First
add_clr_assemblies("loadorder_3a")
# namespace Second {
# public class Generic1 {
# public static string Flag = typeof(Generic1).FullName;
# }
# }
import Second
AreEqual(First.Generic1[int, int].Flag, "First.Generic1`2")
AreEqual(Second.Generic1.Flag, "Second.Generic1")
from Second import Generic1
AreEqual(Generic1.Flag, "Second.Generic1")
AssertError(SystemError, lambda: Generic1[int, int])
from First import Generic1
AreEqual(Generic1[int, int].Flag, "First.Generic1`2")
AssertError(SystemError, lambda: Generic1.Flag)
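# In short: importing the same-named type from a second assembly rebinds only
# the local name. The generic First.Generic1`2 and the non-generic
# Second.Generic1 keep coexisting under their namespaces, which is what the
# indexing (Generic1[int, int]) and .Flag checks above verify.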
| 29.45098 | 97 | 0.64514 |
6badf5c4f0ad230ab3b44cb7b3de1c008154c278 | 280,762 | py | Python | com/vmware/nsx/node/services_client.py | adammillerio/vsphere-automation-sdk-python | c07e1be98615201139b26c28db3aa584c4254b66 | ["MIT"] | null | null | null | com/vmware/nsx/node/services_client.py | adammillerio/vsphere-automation-sdk-python | c07e1be98615201139b26c28db3aa584c4254b66 | ["MIT"] | null | null | null | com/vmware/nsx/node/services_client.py | adammillerio/vsphere-automation-sdk-python | c07e1be98615201139b26c28db3aa584c4254b66 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
#---------------------------------------------------------------------------
# Copyright 2020 VMware, Inc. All rights reserved.
# AUTO GENERATED FILE -- DO NOT MODIFY!
#
# vAPI stub file for package com.vmware.nsx.node.services.
#---------------------------------------------------------------------------
"""
"""
__author__ = 'VMware, Inc.'
__docformat__ = 'restructuredtext en'
import sys
from vmware.vapi.bindings import type
from vmware.vapi.bindings.converter import TypeConverter
from vmware.vapi.bindings.enum import Enum
from vmware.vapi.bindings.error import VapiError
from vmware.vapi.bindings.struct import VapiStruct
from vmware.vapi.bindings.stub import (
ApiInterfaceStub, StubFactoryBase, VapiInterface)
from vmware.vapi.bindings.common import raise_core_exception
from vmware.vapi.data.validator import (UnionValidator, HasFieldsOfValidator)
from vmware.vapi.exception import CoreException
from vmware.vapi.lib.constants import TaskType
from vmware.vapi.lib.rest import OperationRestMetadata
class ClusterManager(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.cluster_manager'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _ClusterManagerStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read cluster boot manager service properties
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceProperties`
:return: com.vmware.nsx.model.NodeServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def restart(self):
"""
Restart, start or stop the cluster boot manager service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Restart, start or stop the cluster boot manager service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Restart, start or stop the cluster boot manager service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
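# Usage sketch (added illustration, not part of the generated stub). It assumes
# an already-built vmware.vapi.bindings.stub.StubConfiguration for an NSX
# manager session; the variable name `stub_config` is hypothetical:
#
#   cluster_manager = ClusterManager(stub_config)
#   props = cluster_manager.get()       # NodeServiceProperties
#   status = cluster_manager.restart()  # NodeServiceStatusProperties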
class CmInventory(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.cm_inventory'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _CmInventoryStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read cm inventory service properties
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceProperties`
:return: com.vmware.nsx.model.NodeServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def restart(self):
"""
Restart, start or stop the manager service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Restart, start or stop the manager service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Restart, start or stop the manager service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
class Controller(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.controller'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _ControllerStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read controller service properties
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceProperties`
:return: com.vmware.nsx.model.NodeServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def restart(self):
"""
Restart, start or stop the controller service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Restart, start or stop the controller service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Restart, start or stop the controller service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
class Http(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.http'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _HttpStub)
self._VAPI_OPERATION_IDS = {}
def applycertificate(self,
certificate_id,
):
"""
Applies a security certificate to the http service. In the POST
request, the CERTIFICATE_ID references a certificate created with the
/api/v1/trust-management APIs. Issuing this request causes the http
service to restart so that the service can begin using the new
certificate. When the POST request succeeds, it doesn't return a valid
response. The request times out because of the restart.
:type certificate_id: :class:`str`
:param certificate_id: Certificate ID (required)
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('applycertificate',
{
'certificate_id': certificate_id,
})
def get(self):
"""
This API is deprecated. Read the configuration of the http service by
calling the GET /api/v1/cluster/api-service API.
:rtype: :class:`com.vmware.nsx.model_client.NodeHttpServiceProperties`
:return: com.vmware.nsx.model.NodeHttpServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def restart(self):
"""
Restart the http service
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Start the http service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Stop the http service
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
def update(self,
node_http_service_properties,
):
"""
This API is deprecated. Make changes to the http service configuration
by calling the PUT /api/v1/cluster/api-service API.
:type node_http_service_properties: :class:`com.vmware.nsx.model_client.NodeHttpServiceProperties`
:param node_http_service_properties: (required)
:rtype: :class:`com.vmware.nsx.model_client.NodeHttpServiceProperties`
:return: com.vmware.nsx.model.NodeHttpServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('update',
{
'node_http_service_properties': node_http_service_properties,
})
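# Usage sketch (added illustration; `stub_config` and the certificate id are
# hypothetical). Per the applycertificate docstring above, the call restarts
# the http service, so it is expected to time out rather than return a normal
# response:
#
#   http_svc = Http(stub_config)
#   http_svc.applycertificate(certificate_id="<certificate-id>")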
class InstallUpgrade(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.install_upgrade'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _InstallUpgradeStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read NSX install-upgrade service properties
:rtype: :class:`com.vmware.nsx.model_client.NodeInstallUpgradeServiceProperties`
:return: com.vmware.nsx.model.NodeInstallUpgradeServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def restart(self):
"""
Restart, start or stop the NSX install-upgrade service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.ConcurrentChange`
Conflict
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Restart, start or stop the NSX install-upgrade service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.ConcurrentChange`
Conflict
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Restart, start or stop the NSX install-upgrade service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.ConcurrentChange`
Conflict
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
def update(self,
node_install_upgrade_service_properties,
):
"""
Update NSX install-upgrade service properties
:type node_install_upgrade_service_properties: :class:`com.vmware.nsx.model_client.NodeInstallUpgradeServiceProperties`
:param node_install_upgrade_service_properties: (required)
:rtype: :class:`com.vmware.nsx.model_client.NodeInstallUpgradeServiceProperties`
:return: com.vmware.nsx.model.NodeInstallUpgradeServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.ConcurrentChange`
Conflict
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('update',
{
'node_install_upgrade_service_properties': node_install_upgrade_service_properties,
})
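# Usage sketch (added illustration; `stub_config` is hypothetical). The
# *ServiceProperties endpoints follow a read-modify-write pattern:
#
#   install_upgrade = InstallUpgrade(stub_config)
#   props = install_upgrade.get()
#   # ...modify fields on props as needed...
#   install_upgrade.update(node_install_upgrade_service_properties=props)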
class Liagent(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.liagent'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _LiagentStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read liagent service properties
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceProperties`
:return: com.vmware.nsx.model.NodeServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def restart(self):
"""
Restart, start or stop the liagent service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Restart, start or stop the liagent service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Restart, start or stop the liagent service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
class Manager(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.manager'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _ManagerStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read service properties
:rtype: :class:`com.vmware.nsx.model_client.NodeProtonServiceProperties`
:return: com.vmware.nsx.model.NodeProtonServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def resetmanagerlogginglevels(self):
"""
Reset the logging levels to default values
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('resetmanagerlogginglevels', None)
def restart(self):
"""
Restart, start or stop the service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Restart, start or stop the service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Restart, start or stop the service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
def update(self,
node_proton_service_properties,
):
"""
Update service properties
:type node_proton_service_properties: :class:`com.vmware.nsx.model_client.NodeProtonServiceProperties`
:param node_proton_service_properties: (required)
:rtype: :class:`com.vmware.nsx.model_client.NodeProtonServiceProperties`
:return: com.vmware.nsx.model.NodeProtonServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('update',
{
'node_proton_service_properties': node_proton_service_properties,
})
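# Usage sketch (added illustration; `stub_config` is hypothetical). Manager
# additionally exposes a logging reset alongside the usual lifecycle calls:
#
#   manager = Manager(stub_config)
#   manager.resetmanagerlogginglevels()  # restore default logging levels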
class MgmtPlaneBus(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.mgmt_plane_bus'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _MgmtPlaneBusStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read Rabbit MQ service properties
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceProperties`
:return: com.vmware.nsx.model.NodeServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def restart(self):
"""
Restart, start or stop the Rabbit MQ service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Restart, start or stop the Rabbit MQ service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Restart, start or stop the Rabbit MQ service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
class MigrationCoordinator(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.migration_coordinator'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _MigrationCoordinatorStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read migration coordinator service properties
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceProperties`
:return: com.vmware.nsx.model.NodeServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def restart(self):
"""
Restart, start or stop the migration coordinator service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Restart, start or stop the migration coordinator service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Restart, start or stop the migration coordinator service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
class NodeMgmt(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.node_mgmt'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _NodeMgmtStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read appliance management service properties
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceProperties`
:return: com.vmware.nsx.model.NodeServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def restart(self):
"""
Restart the node management service
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
class NodeStats(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.node_stats'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _NodeStatsStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read NSX node-stats service properties
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceProperties`
:return: com.vmware.nsx.model.NodeServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def restart(self):
"""
Restart, start or stop the NSX node-stats service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Restart, start or stop the NSX node-stats service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Restart, start or stop the NSX node-stats service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
class NsxMessageBus(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.nsx_message_bus'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _NsxMessageBusStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read NSX Message Bus service properties
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceProperties`
:return: com.vmware.nsx.model.NodeServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def restart(self):
"""
Restart, start or stop the NSX Message Bus service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Restart, start or stop the NSX Message Bus service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Restart, start or stop the NSX Message Bus service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
class NsxPlatformClient(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.nsx_platform_client'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _NsxPlatformClientStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read NSX Platform Client service properties
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceProperties`
:return: com.vmware.nsx.model.NodeServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def restart(self):
"""
Restart, start or stop the NSX Platform Client service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Restart, start or stop the NSX Platform Client service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Restart, start or stop the NSX Platform Client service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
class NsxUpgradeAgent(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.nsx_upgrade_agent'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _NsxUpgradeAgentStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read NSX upgrade Agent service properties
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceProperties`
:return: com.vmware.nsx.model.NodeServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def restart(self):
"""
Restart, start or stop the NSX upgrade agent service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Restart, start or stop the NSX upgrade agent service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Restart, start or stop the NSX upgrade agent service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
class Ntp(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.ntp'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _NtpStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read NTP service properties
:rtype: :class:`com.vmware.nsx.model_client.NodeNtpServiceProperties`
:return: com.vmware.nsx.model.NodeNtpServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def restart(self):
"""
Restart, start or stop the NTP service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Restart, start or stop the NTP service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Restart, start or stop the NTP service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
def update(self,
node_ntp_service_properties,
):
"""
Update NTP service properties
:type node_ntp_service_properties: :class:`com.vmware.nsx.model_client.NodeNtpServiceProperties`
:param node_ntp_service_properties: (required)
:rtype: :class:`com.vmware.nsx.model_client.NodeNtpServiceProperties`
:return: com.vmware.nsx.model.NodeNtpServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('update',
{
'node_ntp_service_properties': node_ntp_service_properties,
})
class Policy(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.policy'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _PolicyStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read service properties
:rtype: :class:`com.vmware.nsx.model_client.NodePolicyServiceProperties`
:return: com.vmware.nsx.model.NodePolicyServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def resetmanagerlogginglevels(self):
"""
Reset the logging levels to default values
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('resetmanagerlogginglevels', None)
def restart(self):
"""
Restart, start or stop the service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Restart, start or stop the service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Restart, start or stop the service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
def update(self,
node_policy_service_properties,
):
"""
Update service properties
:type node_policy_service_properties: :class:`com.vmware.nsx.model_client.NodePolicyServiceProperties`
:param node_policy_service_properties: (required)
:rtype: :class:`com.vmware.nsx.model_client.NodePolicyServiceProperties`
:return: com.vmware.nsx.model.NodePolicyServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('update',
{
'node_policy_service_properties': node_policy_service_properties,
})
class Search(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.search'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _SearchStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read NSX Search service properties
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceProperties`
:return: com.vmware.nsx.model.NodeServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def restart(self):
"""
Restart, start or stop the NSX Search service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Restart, start or stop the NSX Search service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Restart, start or stop the NSX Search service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
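# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the generated bindings):
# restarting the NSX Search service and inspecting the reported state.
# Assumes a StubConfiguration built elsewhere in the application, and that
# NodeServiceStatusProperties exposes a ``runtime_state`` field as the
# com.vmware.nsx model documentation describes (an assumption here).
def _example_restart_search(stub_config):
    search_svc = Search(stub_config)
    # Issues the service-level restart action and returns the status object.
    status = search_svc.restart()
    # Callers that need to confirm health can poll get() afterwards.
    return status.runtime_state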
class Snmp(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.snmp'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _SnmpStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read SNMP service properties
:rtype: :class:`com.vmware.nsx.model_client.NodeSnmpServiceProperties`
:return: com.vmware.nsx.model.NodeSnmpServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def restart(self):
"""
Restart the SNMP service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Start the SNMP service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Stop the SNMP service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
def update(self,
node_snmp_service_properties,
):
"""
Update SNMP service properties
:type node_snmp_service_properties: :class:`com.vmware.nsx.model_client.NodeSnmpServiceProperties`
:param node_snmp_service_properties: (required)
:rtype: :class:`com.vmware.nsx.model_client.NodeSnmpServiceProperties`
:return: com.vmware.nsx.model.NodeSnmpServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.ConcurrentChange`
Conflict
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('update',
{
'node_snmp_service_properties': node_snmp_service_properties,
})
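# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the generated bindings):
# update() above replaces the whole NodeSnmpServiceProperties object, so the
# usual pattern is read-modify-write rather than sending a partial object.
def _example_update_snmp(stub_config):
    snmp_svc = Snmp(stub_config)
    props = snmp_svc.get()          # fetch the full current properties
    # ...mutate the desired fields on ``props`` here; exact attribute names
    # follow the NodeSnmpServiceProperties model and are not asserted by
    # this sketch...
    return snmp_svc.update(props)   # PUT the full object back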
class Ssh(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.ssh'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _SshStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read SSH service properties
:rtype: :class:`com.vmware.nsx.model_client.NodeSshServiceProperties`
:return: com.vmware.nsx.model.NodeSshServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def removehostfingerprint(self,
known_host_parameter,
):
"""
Remove a host's fingerprint from the known hosts file
:type known_host_parameter: :class:`com.vmware.nsx.model_client.KnownHostParameter`
:param known_host_parameter: (required)
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('removehostfingerprint',
{
'known_host_parameter': known_host_parameter,
})
def restart(self):
"""
Restart the SSH service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Start the SSH service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Stop the SSH service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
def update(self,
node_ssh_service_properties,
):
"""
Update SSH service properties. If the start_on_boot property is updated
to true, any existing SSH sessions are stopped and the SSH service is
restarted.
:type node_ssh_service_properties: :class:`com.vmware.nsx.model_client.NodeSshServiceProperties`
:param node_ssh_service_properties: (required)
:rtype: :class:`com.vmware.nsx.model_client.NodeSshServiceProperties`
:return: com.vmware.nsx.model.NodeSshServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('update',
{
'node_ssh_service_properties': node_ssh_service_properties,
})
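# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the generated bindings):
# enabling SSH at boot via the read-modify-write pattern described in
# update() above. Per that docstring, setting start_on_boot to true also
# restarts the service and drops existing sessions. The exact attribute
# nesting on NodeSshServiceProperties is an assumption based on the model
# documentation.
def _example_enable_ssh_on_boot(stub_config):
    ssh_svc = Ssh(stub_config)
    props = ssh_svc.get()
    props.service_properties.start_on_boot = True   # assumed field path
    return ssh_svc.update(props)                     # restarts the service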
class Syslog(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.syslog'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _SyslogStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read syslog service properties
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceProperties`
:return: com.vmware.nsx.model.NodeServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def restart(self):
"""
Restart the syslog service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Start the syslog service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Stop the syslog service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
class Telemetry(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.telemetry'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _TelemetryStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read Telemetry service properties
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceProperties`
:return: com.vmware.nsx.model.NodeServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def restart(self):
"""
Restart the Telemetry service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Start the Telemetry service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Stop the Telemetry service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
class UiService(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.node.services.ui_service'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _UiServiceStub)
self._VAPI_OPERATION_IDS = {}
def get(self):
"""
Read UI service properties
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceProperties`
:return: com.vmware.nsx.model.NodeServiceProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get', None)
def restart(self):
"""
Restart the UI service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('restart', None)
def start(self):
"""
Start the UI service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('start', None)
def stop(self):
"""
Stop the UI service
:rtype: :class:`com.vmware.nsx.model_client.NodeServiceStatusProperties`
:return: com.vmware.nsx.model.NodeServiceStatusProperties
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('stop', None)
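# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the generated bindings):
# every service interface in this module exposes the same
# get()/restart()/start()/stop() surface, so callers can drive several of
# them polymorphically. This helper is a usage pattern, not generated API.
def _example_bounce_services(stub_config,
                             service_classes=(Syslog, Telemetry, UiService)):
    results = {}
    for cls in service_classes:
        svc = cls(stub_config)
        # Key results by the canonical service identifier each class carries.
        results[cls._VAPI_SERVICE_ID] = svc.restart()
    return results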
class _ClusterManagerStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/cluster_manager',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/cluster_manager?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/cluster_manager?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/cluster_manager?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.cluster_manager',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
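# ---------------------------------------------------------------------------
# Illustrative note (not generated code): the OperationRestMetadata entries
# above are what the vAPI runtime uses to translate invoke() calls into
# plain REST requests. The sketch below shows the roughly equivalent raw
# HTTP call for the 'restart' operation; host, credentials, and TLS
# verification are placeholders to be adapted by the caller.
def _example_raw_cluster_manager_restart(manager_host, username, password):
    import requests  # local import: keeps the module free of extra deps
    url = ('https://%s/api/v1/node/services/cluster_manager?action=restart'
           % manager_host)
    # verify=False is for lab use only; production callers should verify TLS.
    resp = requests.post(url, auth=(username, password), verify=False)
    resp.raise_for_status()
    return resp.json()  # NodeServiceStatusProperties as a plain dict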
class _CmInventoryStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/cm-inventory',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/cm-inventory?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/cm-inventory?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/cm-inventory?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.cm_inventory',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _ControllerStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/controller',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/controller?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/controller?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/controller?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.controller',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _HttpStub(ApiInterfaceStub):
def __init__(self, config):
# properties for applycertificate operation
applycertificate_input_type = type.StructType('operation-input', {
'certificate_id': type.StringType(),
})
applycertificate_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
applycertificate_input_value_validator_list = [
]
applycertificate_output_validator_list = [
]
applycertificate_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/http?action=apply_certificate',
path_variables={
},
query_parameters={
'certificate_id': 'certificate_id',
},
content_type='application/json'
)
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/http',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/http?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/http?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/http?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for update operation
update_input_type = type.StructType('operation-input', {
'node_http_service_properties': type.ReferenceType('com.vmware.nsx.model_client', 'NodeHttpServiceProperties'),
})
update_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
update_input_value_validator_list = [
]
update_output_validator_list = [
]
update_rest_metadata = OperationRestMetadata(
http_method='PUT',
url_template='/api/v1/node/services/http',
request_body_parameter='node_http_service_properties',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'applycertificate': {
'input_type': applycertificate_input_type,
'output_type': type.VoidType(),
'errors': applycertificate_error_dict,
'input_value_validator_list': applycertificate_input_value_validator_list,
'output_validator_list': applycertificate_output_validator_list,
'task_type': TaskType.NONE,
},
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeHttpServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.VoidType(),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.VoidType(),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
'update': {
'input_type': update_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeHttpServiceProperties'),
'errors': update_error_dict,
'input_value_validator_list': update_input_value_validator_list,
'output_validator_list': update_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'applycertificate': applycertificate_rest_metadata,
'get': get_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
'update': update_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.http',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
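# ---------------------------------------------------------------------------
# Illustrative note (not generated code): applycertificate is the one
# operation in the stub above that carries a query parameter besides
# 'action'; its metadata maps the certificate_id argument onto the query
# string, i.e. POST /api/v1/node/services/http?action=apply_certificate
# &certificate_id=<id>. The sketch assumes an Http interface class generated
# alongside _HttpStub earlier in this module.
def _example_apply_http_certificate(stub_config, certificate_id):
    http_svc = Http(stub_config)                # assumed interface class
    http_svc.applycertificate(certificate_id)   # returns None (VoidType)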
class _InstallUpgradeStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/install-upgrade',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.concurrent_change':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ConcurrentChange'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/install-upgrade?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.concurrent_change':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ConcurrentChange'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/install-upgrade?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.concurrent_change':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ConcurrentChange'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/install-upgrade?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for update operation
update_input_type = type.StructType('operation-input', {
'node_install_upgrade_service_properties': type.ReferenceType('com.vmware.nsx.model_client', 'NodeInstallUpgradeServiceProperties'),
})
update_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.concurrent_change':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ConcurrentChange'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
update_input_value_validator_list = [
]
update_output_validator_list = [
]
update_rest_metadata = OperationRestMetadata(
http_method='PUT',
url_template='/api/v1/node/services/install-upgrade',
request_body_parameter='node_install_upgrade_service_properties',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeInstallUpgradeServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
'update': {
'input_type': update_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeInstallUpgradeServiceProperties'),
'errors': update_error_dict,
'input_value_validator_list': update_input_value_validator_list,
'output_validator_list': update_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
'update': update_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.install_upgrade',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
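# ---------------------------------------------------------------------------
# Illustrative note (not generated code): unlike most stubs in this module,
# the install-upgrade operations above can raise ConcurrentChange (Conflict),
# so callers typically retry the read-modify-write. The InstallUpgrade
# interface class is assumed to be generated alongside this stub earlier in
# the module.
def _example_update_install_upgrade(stub_config, mutate, attempts=3):
    from com.vmware.vapi.std.errors_client import ConcurrentChange
    svc = InstallUpgrade(stub_config)
    for _ in range(attempts):
        props = svc.get()
        mutate(props)                 # caller-supplied mutation callback
        try:
            return svc.update(props)
        except ConcurrentChange:
            continue                  # object changed underneath us; refetch
    raise RuntimeError('install-upgrade update kept conflicting')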
class _LiagentStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/liagent',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/liagent?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/liagent?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/liagent?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.liagent',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _ManagerStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/manager',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for resetmanagerlogginglevels operation
resetmanagerlogginglevels_input_type = type.StructType('operation-input', {})
resetmanagerlogginglevels_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
resetmanagerlogginglevels_input_value_validator_list = [
]
resetmanagerlogginglevels_output_validator_list = [
]
resetmanagerlogginglevels_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/manager?action=reset-manager-logging-levels',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/manager?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/manager?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/manager?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for update operation
update_input_type = type.StructType('operation-input', {
'node_proton_service_properties': type.ReferenceType('com.vmware.nsx.model_client', 'NodeProtonServiceProperties'),
})
update_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
update_input_value_validator_list = [
]
update_output_validator_list = [
]
update_rest_metadata = OperationRestMetadata(
http_method='PUT',
url_template='/api/v1/node/services/manager',
request_body_parameter='node_proton_service_properties',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeProtonServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'resetmanagerlogginglevels': {
'input_type': resetmanagerlogginglevels_input_type,
'output_type': type.VoidType(),
'errors': resetmanagerlogginglevels_error_dict,
'input_value_validator_list': resetmanagerlogginglevels_input_value_validator_list,
'output_validator_list': resetmanagerlogginglevels_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
'update': {
'input_type': update_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeProtonServiceProperties'),
'errors': update_error_dict,
'input_value_validator_list': update_input_value_validator_list,
'output_validator_list': update_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'resetmanagerlogginglevels': resetmanagerlogginglevels_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
'update': update_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.manager',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
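
# Note on the manager stub's 'update' operation: request_body_parameter=
# 'node_proton_service_properties' tells the REST layer to serialize that
# single input field as the JSON body of PUT /api/v1/node/services/manager
# (nothing goes into path_variables or query_parameters). A hedged sketch,
# assuming the public `Manager` wrapper from earlier in this module and an
# existing `stub_config`; the mutated field name is illustrative only:
#
#     from com.vmware.nsx.model_client import NodeProtonServiceProperties
#     manager = Manager(stub_config)
#     props = manager.get()            # GET current NodeProtonServiceProperties
#     props.logging_level = 'DEBUG'    # field name is an assumption
#     props = manager.update(props)    # PUT with the struct as request body
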
class _MgmtPlaneBusStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/mgmt-plane-bus',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/mgmt-plane-bus?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/mgmt-plane-bus?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/mgmt-plane-bus?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.mgmt_plane_bus',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
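
# All of these stubs map the same five standard vAPI faults, so error
# handling is uniform across services. A sketch, assuming the public
# `MgmtPlaneBus` wrapper and a `stub_config` as above (the except bodies are
# illustrative):
#
#     from com.vmware.vapi.std.errors_client import ServiceUnavailable, NotFound
#     try:
#         MgmtPlaneBus(stub_config).start()
#     except NotFound:
#         pass  # service not present on this node flavor
#     except ServiceUnavailable:
#         pass  # node is busy; retry later
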
class _MigrationCoordinatorStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/migration-coordinator',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/migration-coordinator?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/migration-coordinator?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/migration-coordinator?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.migration_coordinator',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
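
# Each ApiInterfaceStub here is constructed with is_vapi_rest=False, meaning
# the runtime treats the OperationRestMetadata above as a native NSX REST
# mapping (HTTP method + url_template) rather than the vAPI-REST wire
# protocol. That is why every request detail -- including the '?action=...'
# suffix -- is spelled out per operation instead of being derived from the
# operation name.
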
class _NodeMgmtStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/node-mgmt',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/node-mgmt?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.VoidType(),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'restart': restart_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.node_mgmt',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
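
# The node-mgmt stub is deliberately narrower than its siblings: only 'get'
# and 'restart' are exposed, and 'restart' is typed type.VoidType() instead
# of NodeServiceStatusProperties -- plausibly because restarting the node
# management service interrupts the very API channel that would report its
# status. Sketch (wrapper class name follows the generated binding pattern):
#
#     NodeMgmt(stub_config).restart()  # POST .../node-mgmt?action=restart, no payload
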
class _NodeStatsStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/node-stats',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/node-stats?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/node-stats?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/node-stats?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.node_stats',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
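
# The start/stop/restart operations above all resolve to
# NodeServiceStatusProperties, so callers can inspect the resulting state
# directly. Sketch, assuming the public `NodeStats` wrapper; the field read
# here follows the generated model bindings and should be treated as an
# assumption:
#
#     status = NodeStats(stub_config).stop()
#     print(status.runtime_state)  # e.g. 'stopped'
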
class _NsxMessageBusStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/nsx-message-bus',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/nsx-message-bus?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/nsx-message-bus?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/nsx-message-bus?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.nsx_message_bus',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _NsxPlatformClientStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/nsx-platform-client',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/nsx-platform-client?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/nsx-platform-client?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/nsx-platform-client?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.nsx_platform_client',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _NsxUpgradeAgentStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/nsx-upgrade-agent',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/nsx-upgrade-agent?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/nsx-upgrade-agent?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/nsx-upgrade-agent?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.nsx_upgrade_agent',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
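
# Because most service stubs in this module share the same
# get/start/stop/restart shape, a caller-side helper can drive them
# uniformly. A hypothetical convenience (not part of the SDK), taking any
# public wrapper instance such as NsxMessageBus, NsxUpgradeAgent, or Ntp:
#
#     def bounce(service):
#         """Stop then start a node service, returning the final status."""
#         service.stop()
#         return service.start()
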
class _NtpStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/ntp',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/ntp?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/ntp?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/ntp?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for update operation
update_input_type = type.StructType('operation-input', {
'node_ntp_service_properties': type.ReferenceType('com.vmware.nsx.model_client', 'NodeNtpServiceProperties'),
})
update_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
update_input_value_validator_list = [
]
update_output_validator_list = [
]
update_rest_metadata = OperationRestMetadata(
http_method='PUT',
url_template='/api/v1/node/services/ntp',
request_body_parameter='node_ntp_service_properties',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeNtpServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
'update': {
'input_type': update_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeNtpServiceProperties'),
'errors': update_error_dict,
'input_value_validator_list': update_input_value_validator_list,
'output_validator_list': update_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
'update': update_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.ntp',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
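
# Dispatch note: every stub passes ApiInterfaceStub.__init__ two parallel
# dicts keyed by operation name -- `operations` (input/output types, error
# map, validators) and `rest_metadata` (HTTP binding). At call time the
# runtime looks up both for the invoked operation; for the NTP stub above,
# 'stop' resolves to:
#
#     operations['stop']['output_type']  # NodeServiceStatusProperties
#     rest_metadata['stop']              # POST /api/v1/node/services/ntp?action=stop
#
# The '?action=stop' suffix lives in url_template itself, which is why
# query_parameters stays empty for these action-style operations.
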
class _PolicyStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/policy',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for resetmanagerlogginglevels operation
resetmanagerlogginglevels_input_type = type.StructType('operation-input', {})
resetmanagerlogginglevels_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
resetmanagerlogginglevels_input_value_validator_list = [
]
resetmanagerlogginglevels_output_validator_list = [
]
resetmanagerlogginglevels_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/policy?action=reset-manager-logging-levels',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/policy?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/policy?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/policy?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for update operation
update_input_type = type.StructType('operation-input', {
'node_policy_service_properties': type.ReferenceType('com.vmware.nsx.model_client', 'NodePolicyServiceProperties'),
})
update_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
update_input_value_validator_list = [
]
update_output_validator_list = [
]
update_rest_metadata = OperationRestMetadata(
http_method='PUT',
url_template='/api/v1/node/services/policy',
request_body_parameter='node_policy_service_properties',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodePolicyServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'resetmanagerlogginglevels': {
'input_type': resetmanagerlogginglevels_input_type,
'output_type': type.VoidType(),
'errors': resetmanagerlogginglevels_error_dict,
'input_value_validator_list': resetmanagerlogginglevels_input_value_validator_list,
'output_validator_list': resetmanagerlogginglevels_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
'update': {
'input_type': update_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodePolicyServiceProperties'),
'errors': update_error_dict,
'input_value_validator_list': update_input_value_validator_list,
'output_validator_list': update_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'resetmanagerlogginglevels': resetmanagerlogginglevels_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
'update': update_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.policy',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
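# Editor's note: the stubs that follow (_SearchStub, _SnmpStub, _SshStub,
# _SyslogStub, _TelemetryStub, _UiServiceStub) repeat the same generated
# pattern as the policy stub above -- per-operation input types, error dicts
# and validator lists, plus OperationRestMetadata mapping get/restart/start/
# stop (and update or service-specific actions, where supported) onto the
# /api/v1/node/services/<name> endpoints.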
class _SearchStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/search',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/search?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/search?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/search?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.search',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _SnmpStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/snmp',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/snmp?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/snmp?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/snmp?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for update operation
update_input_type = type.StructType('operation-input', {
'node_snmp_service_properties': type.ReferenceType('com.vmware.nsx.model_client', 'NodeSnmpServiceProperties'),
})
update_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.concurrent_change':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ConcurrentChange'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
update_input_value_validator_list = [
]
update_output_validator_list = [
]
update_rest_metadata = OperationRestMetadata(
http_method='PUT',
url_template='/api/v1/node/services/snmp',
request_body_parameter='node_snmp_service_properties',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeSnmpServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
'update': {
'input_type': update_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeSnmpServiceProperties'),
'errors': update_error_dict,
'input_value_validator_list': update_input_value_validator_list,
'output_validator_list': update_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
'update': update_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.snmp',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _SshStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/ssh',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for removehostfingerprint operation
removehostfingerprint_input_type = type.StructType('operation-input', {
'known_host_parameter': type.ReferenceType('com.vmware.nsx.model_client', 'KnownHostParameter'),
})
removehostfingerprint_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
removehostfingerprint_input_value_validator_list = [
]
removehostfingerprint_output_validator_list = [
]
removehostfingerprint_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/ssh?action=remove_host_fingerprint',
request_body_parameter='known_host_parameter',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/ssh?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/ssh?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/ssh?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for update operation
update_input_type = type.StructType('operation-input', {
'node_ssh_service_properties': type.ReferenceType('com.vmware.nsx.model_client', 'NodeSshServiceProperties'),
})
update_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
update_input_value_validator_list = [
]
update_output_validator_list = [
]
update_rest_metadata = OperationRestMetadata(
http_method='PUT',
url_template='/api/v1/node/services/ssh',
request_body_parameter='node_ssh_service_properties',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeSshServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'removehostfingerprint': {
'input_type': removehostfingerprint_input_type,
'output_type': type.VoidType(),
'errors': removehostfingerprint_error_dict,
'input_value_validator_list': removehostfingerprint_input_value_validator_list,
'output_validator_list': removehostfingerprint_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
'update': {
'input_type': update_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeSshServiceProperties'),
'errors': update_error_dict,
'input_value_validator_list': update_input_value_validator_list,
'output_validator_list': update_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'removehostfingerprint': removehostfingerprint_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
'update': update_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.ssh',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _SyslogStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/syslog',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/syslog?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/syslog?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/syslog?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.syslog',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _TelemetryStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/telemetry',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/telemetry?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/telemetry?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/telemetry?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.telemetry',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _UiServiceStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/node/services/ui-service',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for restart operation
restart_input_type = type.StructType('operation-input', {})
restart_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
restart_input_value_validator_list = [
]
restart_output_validator_list = [
]
restart_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/ui-service?action=restart',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for start operation
start_input_type = type.StructType('operation-input', {})
start_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
start_input_value_validator_list = [
]
start_output_validator_list = [
]
start_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/ui-service?action=start',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for stop operation
stop_input_type = type.StructType('operation-input', {})
stop_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
stop_input_value_validator_list = [
]
stop_output_validator_list = [
]
stop_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/node/services/ui-service?action=stop',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceProperties'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'restart': {
'input_type': restart_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': restart_error_dict,
'input_value_validator_list': restart_input_value_validator_list,
'output_validator_list': restart_output_validator_list,
'task_type': TaskType.NONE,
},
'start': {
'input_type': start_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': start_error_dict,
'input_value_validator_list': start_input_value_validator_list,
'output_validator_list': start_output_validator_list,
'task_type': TaskType.NONE,
},
'stop': {
'input_type': stop_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'NodeServiceStatusProperties'),
'errors': stop_error_dict,
'input_value_validator_list': stop_input_value_validator_list,
'output_validator_list': stop_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
'restart': restart_rest_metadata,
'start': start_rest_metadata,
'stop': stop_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.node.services.ui_service',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
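# Editor's note: StubFactory below maps each node service name to its public
# VapiInterface class, and each child package to the dotted path of its own
# StubFactory, which the vAPI runtime resolves on first access.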
class StubFactory(StubFactoryBase):
_attrs = {
'ClusterManager': ClusterManager,
'CmInventory': CmInventory,
'Controller': Controller,
'Http': Http,
'InstallUpgrade': InstallUpgrade,
'Liagent': Liagent,
'Manager': Manager,
'MgmtPlaneBus': MgmtPlaneBus,
'MigrationCoordinator': MigrationCoordinator,
'NodeMgmt': NodeMgmt,
'NodeStats': NodeStats,
'NsxMessageBus': NsxMessageBus,
'NsxPlatformClient': NsxPlatformClient,
'NsxUpgradeAgent': NsxUpgradeAgent,
'Ntp': Ntp,
'Policy': Policy,
'Search': Search,
'Snmp': Snmp,
'Ssh': Ssh,
'Syslog': Syslog,
'Telemetry': Telemetry,
'UiService': UiService,
'cluster_manager': 'com.vmware.nsx.node.services.cluster_manager_client.StubFactory',
'cm_inventory': 'com.vmware.nsx.node.services.cm_inventory_client.StubFactory',
'controller': 'com.vmware.nsx.node.services.controller_client.StubFactory',
'http': 'com.vmware.nsx.node.services.http_client.StubFactory',
'install_upgrade': 'com.vmware.nsx.node.services.install_upgrade_client.StubFactory',
'liagent': 'com.vmware.nsx.node.services.liagent_client.StubFactory',
'manager': 'com.vmware.nsx.node.services.manager_client.StubFactory',
'mgmt_plane_bus': 'com.vmware.nsx.node.services.mgmt_plane_bus_client.StubFactory',
'migration_coordinator': 'com.vmware.nsx.node.services.migration_coordinator_client.StubFactory',
'node_mgmt': 'com.vmware.nsx.node.services.node_mgmt_client.StubFactory',
'node_stats': 'com.vmware.nsx.node.services.node_stats_client.StubFactory',
'nsx_message_bus': 'com.vmware.nsx.node.services.nsx_message_bus_client.StubFactory',
'nsx_platform_client': 'com.vmware.nsx.node.services.nsx_platform_client_client.StubFactory',
'nsx_upgrade_agent': 'com.vmware.nsx.node.services.nsx_upgrade_agent_client.StubFactory',
'ntp': 'com.vmware.nsx.node.services.ntp_client.StubFactory',
'policy': 'com.vmware.nsx.node.services.policy_client.StubFactory',
'search': 'com.vmware.nsx.node.services.search_client.StubFactory',
'snmp': 'com.vmware.nsx.node.services.snmp_client.StubFactory',
'ssh': 'com.vmware.nsx.node.services.ssh_client.StubFactory',
'syslog': 'com.vmware.nsx.node.services.syslog_client.StubFactory',
'telemetry': 'com.vmware.nsx.node.services.telemetry_client.StubFactory',
'ui_service': 'com.vmware.nsx.node.services.ui_service_client.StubFactory',
}
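# --- Illustrative usage sketch (editor's addition, not generated code) ---
# How these generated interfaces are typically driven; `stub_config` stands
# for a hypothetical, previously built vAPI StubConfiguration for the node.
#
#   policy = Policy(stub_config)   # public class backed by _PolicyStub
#   props = policy.get()           # GET  /api/v1/node/services/policy
#   policy.restart()               # POST /api/v1/node/services/policy?action=restart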
| 43.725588
| 144
| 0.618795
|
be2366e41173814f53272b867662b8d68ad4cff0
| 9,743
|
py
|
Python
|
pysat/tests/test_constellation.py
|
pysat/pysat
|
4d12a09ea585b88d54560413e03cae9289113718
|
[
"BSD-3-Clause"
] | 68
|
2019-09-18T19:08:07.000Z
|
2022-03-28T23:22:04.000Z
|
pysat/tests/test_constellation.py
|
iamaSam/pysat
|
4d12a09ea585b88d54560413e03cae9289113718
|
[
"BSD-3-Clause"
] | 603
|
2019-09-18T15:24:37.000Z
|
2022-03-30T20:13:43.000Z
|
pysat/tests/test_constellation.py
|
iamaSam/pysat
|
4d12a09ea585b88d54560413e03cae9289113718
|
[
"BSD-3-Clause"
] | 24
|
2015-04-08T09:33:51.000Z
|
2019-09-06T22:01:34.000Z
|
#!/usr/bin/env python
# Full license can be found in License.md
# Full author list can be found in .zenodo.json file
# DOI:10.5281/zenodo.1199703
# ----------------------------------------------------------------------------
import datetime as dt
import pandas as pds
import pytest
import pysat
from pysat import constellations
class TestConstellationInit:
"""Test the Constellation class."""
def setup(self):
"""Create instruments and a constellation for each test
"""
self.instruments = constellations.single_test.instruments
self.in_kwargs = {"instruments": self.instruments,
"const_module": constellations.single_test}
self.const = None
self.ref_time = pysat.instruments.pysat_testing._test_dates['']['']
def teardown(self):
"""Clean up after each test
"""
del self.const, self.instruments, self.in_kwargs, self.ref_time
@pytest.mark.parametrize("ikey,ival,ilen",
[("const_module", None, 1),
("instruments", None, 1),
(None, None, 2)])
def test_construct_constellation(self, ikey, ival, ilen):
"""Construct a Constellation with good input
"""
if ikey is not None:
self.in_kwargs[ikey] = ival
self.const = pysat.Constellation(**self.in_kwargs)
assert len(self.const.instruments) == ilen
return
def test_init_constellation_bad_inst_module(self):
"""Test Constellation raises AttributeError with bad inst_module input.
"""
with pytest.raises(AttributeError) as aerr:
pysat.Constellation(const_module=self.instruments)
        assert str(aerr).find("missing required attribute 'instruments'") >= 0
return
def test_construct_raises_noniterable_error(self):
"""Attempt to construct a Constellation by const_module and list
"""
with pytest.raises(ValueError) as verr:
self.const = pysat.Constellation(instruments=self.instruments[0])
        assert str(verr).find("instruments argument must be list-like") >= 0
return
def test_construct_null(self):
"""Attempt to construct a Constellation with no arguments
"""
self.const = pysat.Constellation()
assert len(self.const.instruments) == 0
return
def test_getitem(self):
"""Test Constellation iteration through instruments attribute
"""
self.in_kwargs['const_module'] = None
self.const = pysat.Constellation(**self.in_kwargs)
tst_get_inst = self.const[:]
pysat.utils.testing.assert_lists_equal(self.instruments, tst_get_inst)
return
def test_repr_w_inst(self):
"""Test Constellation string output with instruments loaded
"""
self.in_kwargs['const_module'] = None
self.const = pysat.Constellation(**self.in_kwargs)
out_str = self.const.__repr__()
assert out_str.find("Constellation(instruments") >= 0
return
def test_str_w_inst(self):
"""Test Constellation string output with instruments loaded
"""
self.in_kwargs['const_module'] = None
self.const = pysat.Constellation(**self.in_kwargs)
out_str = self.const.__str__()
assert out_str.find("pysat Constellation ") >= 0
assert out_str.find("Index Platform") > 0
return
def test_str_wo_inst(self):
"""Test Constellation string output without instruments.
"""
self.const = pysat.Constellation()
out_str = self.const.__str__()
assert out_str.find("pysat Constellation ") >= 0
assert out_str.find("No assigned Instruments") > 0
return
@pytest.mark.parametrize("common_index,cstr", [(True, "Common"),
(False, "Full")])
def test_str_with_data(self, common_index, cstr):
"""Test Constellation string output with loaded data."""
self.in_kwargs["common_index"] = common_index
self.const = pysat.Constellation(**self.in_kwargs)
self.const.load(date=self.ref_time)
out_str = self.const.__str__()
assert out_str.find("pysat Constellation ") >= 0
assert out_str.find("{:s} time range".format(cstr)) > 0
return
def test_single_attachment_of_custom_function(self):
"""Test successful attachment of custom function
"""
# Define a custom function
def double_mlt(inst):
dmlt = 2.0 * inst.data.mlt
dmlt.name = 'double_mlt'
inst.data[dmlt.name] = dmlt
return
# Initialize the constellation
self.in_kwargs['const_module'] = None
self.const = pysat.Constellation(**self.in_kwargs)
# Add the custom function
self.const.custom_attach(double_mlt, at_pos='end')
self.const.load(date=self.ref_time)
# Test the added value
for inst in self.const:
assert 'double_mlt' in inst.variables
assert (inst['double_mlt'] == 2.0 * inst['mlt']).all()
return
class TestConstellationFunc:
"""Test the Constellation class attributes and methods."""
def setup(self):
"""Create instruments and a constellation for each test
"""
self.inst = list(constellations.testing.instruments)
self.const = pysat.Constellation(instruments=self.inst)
self.ref_time = pysat.instruments.pysat_testing._test_dates['']['']
def teardown(self):
"""Clean up after each test
"""
del self.inst, self.const, self.ref_time
def test_bounds_passthrough(self):
"""Ensure bounds are applied to each instrument within Constellation"""
# Set bounds
stop_date = self.ref_time + dt.timedelta(days=365)
self.const.bounds = (self.ref_time, stop_date)
# Ensure constellation reports correct dates
assert self.const.bounds[0:2] == ([self.ref_time], [stop_date])
# Test bounds are the same for all instruments
for instrument in self.const:
assert instrument.bounds == self.const.bounds
return
def test_empty_data_index(self):
""" Test the empty index attribute."""
# Test the attribute with no loaded data
assert isinstance(self.const.index, pds.Index)
assert len(self.const.index) == 0
return
def test_empty_data_date(self):
"""Test the date property when no data is loaded."""
assert self.const.date is None
return
def test_empty_variables(self):
"""Test the variables property when no data is loaded."""
assert len(self.const.variables) == 0
return
def test_empty_flag_data_empty(self):
""" Test the status of the empty flag for unloaded data."""
assert self.const.empty
return
def test_empty_flag_data_empty_partial_load(self):
""" Test the status of the empty flag for partially loaded data."""
# Load only one instrument and test the status flag
self.const.instruments[0].load(date=self.ref_time)
assert self.const.empty
return
def test_empty_flag_data_not_empty_partial_load(self):
"""Test the alt status of the empty flag for partially loaded data."""
# Load only one instrument and test the status flag for alternate flag
self.const.instruments[0].load(date=self.ref_time)
assert not self.const._empty(all_inst=False)
return
def test_empty_flag_data_not_empty(self):
""" Test the status of the empty flag for loaded data."""
# Load data and test the status flag
self.const.load(date=self.ref_time)
assert not self.const.empty
return
@pytest.mark.parametrize("ikwarg", [{"common_index": False},
{"index_res": 60.0}])
def test_full_data_index(self, ikwarg):
""" Test the empty index attribute."""
# Test the attribute with loaded data
self.const = pysat.Constellation(instruments=self.inst, **ikwarg)
self.const.load(date=self.ref_time)
assert isinstance(self.const.index, pds.Index)
assert self.const.index[0] == self.ref_time
if "index_res" in ikwarg.keys():
assert self.const.index.freq == pds.DateOffset(
seconds=ikwarg['index_res'])
return
def test_today_yesterday_and_tomorrow(self):
""" Test the correct instantiation of yesterday/today/tomorrow dates
"""
for cinst in self.const.instruments:
assert cinst.today() == self.const.today()
assert cinst.yesterday() == self.const.yesterday()
assert cinst.tomorrow() == self.const.tomorrow()
return
def test_full_data_date(self):
"""Test the date property when no data is loaded."""
# Test the attribute with loaded data
self.const.load(date=self.ref_time)
assert self.const.date == self.ref_time
return
def test_full_variables(self):
"""Test the variables property when no data is loaded."""
# Test the attribute with loaded data
self.const.load(date=self.ref_time)
assert len(self.const.variables) > 0
assert 'uts_pysat_testing' in self.const.variables
assert 'x' in self.const.variables
return
def test_download(self):
"""Check that instruments are downloadable."""
self.const.download(self.ref_time, self.ref_time)
for inst in self.const.instruments:
assert len(inst.files.files) > 0
return
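# --- Illustrative sketch (editor's addition) ---
# The Constellation pattern these tests exercise, in brief:
#
#   insts = list(pysat.constellations.testing.instruments)
#   const = pysat.Constellation(instruments=insts)
#   const.load(date=const.today())
#   print(const.variables)    # combined variables of the member instruments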
| 36.490637
| 79
| 0.626809
|
b03ce1807ab1dd43c76d3a20691480d60319e240
| 373
|
py
|
Python
|
rx/core/abc/disposable.py
|
mmpio/RxPY
|
4ed60bb5c04aa85de5210e5537a6adfe1b667d50
|
[
"MIT"
] | 4,342
|
2015-01-06T09:00:23.000Z
|
2022-03-28T15:05:50.000Z
|
rx/core/abc/disposable.py
|
mmpio/RxPY
|
4ed60bb5c04aa85de5210e5537a6adfe1b667d50
|
[
"MIT"
] | 613
|
2015-01-07T20:44:56.000Z
|
2022-03-20T06:14:20.000Z
|
rx/core/abc/disposable.py
|
mmpio/RxPY
|
4ed60bb5c04aa85de5210e5537a6adfe1b667d50
|
[
"MIT"
] | 420
|
2015-01-07T14:30:30.000Z
|
2022-03-11T22:47:46.000Z
|
from abc import ABC, abstractmethod
class Disposable(ABC):
"""Disposable abstract base class. Untyped."""
@abstractmethod
def dispose(self):
raise NotImplementedError
def __enter__(self):
"""Context management protocol."""
def __exit__(self, typ, value, traceback):
"""Context management protocol."""
self.dispose()
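# Illustrative sketch (editor's addition, not part of the original module):
# a minimal concrete Disposable. The context-management protocol above
# guarantees dispose() runs when the with-block exits:
#
#     with _ConsoleDisposable():
#         ...   # dispose() is invoked automatically on exit
class _ConsoleDisposable(Disposable):
    def dispose(self):
        print("resource released")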
| 21.941176
| 50
| 0.651475
|
c4cdd0149d036a13946d2e0c851ef613eee225f4
| 2,892
|
py
|
Python
|
Lib/svpelab/pvsim_manual.py
|
jayatsandia/svp_energy_lab
|
5fb7ac244f44e5a4c419ed7bae8b573bf2b7e31c
|
[
"Apache-2.0"
] | null | null | null |
Lib/svpelab/pvsim_manual.py
|
jayatsandia/svp_energy_lab
|
5fb7ac244f44e5a4c419ed7bae8b573bf2b7e31c
|
[
"Apache-2.0"
] | null | null | null |
Lib/svpelab/pvsim_manual.py
|
jayatsandia/svp_energy_lab
|
5fb7ac244f44e5a4c419ed7bae8b573bf2b7e31c
|
[
"Apache-2.0"
] | 3
|
2020-04-05T08:28:21.000Z
|
2020-12-07T22:58:56.000Z
|
"""
Copyright (c) 2017, Sandia National Labs and SunSpec Alliance
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the names of the Sandia National Labs and SunSpec Alliance nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Questions can be directed to support@sunspec.org
"""
import os
import pvsim
manual_info = {
'name': os.path.splitext(os.path.basename(__file__))[0],
'mode': 'Manual'
}
def pvsim_info():
return manual_info
def params(info, group_name):
gname = lambda name: group_name + '.' + name
pname = lambda name: group_name + '.' + GROUP_NAME + '.' + name
mode = manual_info['mode']
info.param_add_value(gname('mode'), mode)
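# Editor's note: GROUP_NAME below is looked up when pname is called, not at
# definition time, so declaring it after params() is safe; pname itself is
# unused in this minimal manual driver.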
GROUP_NAME = 'manual'
class PVSim(pvsim.PVSim):
def __init__(self, ts, group_name):
pvsim.PVSim.__init__(self, ts, group_name)
def irradiance_set(self, irradiance=1000):
if self.ts.confirm('Please change the irradiance to %0.1f W/m^2.' % irradiance) is False:
raise pvsim.PVSimError('Aborted PV simulation')
def power_set(self, power):
if self.ts.confirm('Please change the power to %0.1f%% power.' % power) is False:
raise pvsim.PVSimError('Aborted PV simulation')
def power_on(self):
if self.ts.confirm('Please turn on PV simulator to give EUT DC power.') is False:
raise pvsim.PVSimError('Aborted PV simulation')
def profile_start(self):
if self.ts.confirm('Please run the PV simulator profile.') is False:
raise pvsim.PVSimError('Aborted PV simulation')
if __name__ == "__main__":
pass
| 37.076923
| 97
| 0.743776
|
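The manual PV simulator above turns every operation into an operator prompt via `ts.confirm`; a self-contained sketch of that pattern (the `FakeTS` console stand-in is invented here, the real svpelab framework supplies the test-script object):
class PVSimError(Exception):
    pass

class FakeTS:                        # console stand-in for the svpelab ts object
    def confirm(self, msg):
        return input(msg + " [y/n] ").strip().lower() == "y"

def irradiance_set(ts, irradiance=1000.0):
    # mirrors PVSim.irradiance_set above: the operator applies the change by hand
    if ts.confirm('Please change the irradiance to %0.1f W/m^2.' % irradiance) is False:
        raise PVSimError('Aborted PV simulation')

if __name__ == "__main__":
    irradiance_set(FakeTS(), 800.0)  # raises PVSimError if the operator answers n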
605474a43d83f3b4a32d466410dc14e070ac6a5a
| 18,643
|
py
|
Python
|
util/scripts/recparse.py
|
Jonathan727/javarosa
|
6c1c2e62840511430ef22fbb550d3f3f605a1a78
|
[
"Apache-2.0"
] | 10
|
2017-03-24T20:26:54.000Z
|
2020-08-28T18:33:34.000Z
|
util/scripts/recparse.py
|
grzesiek2010/javarosa
|
3eb73b5c8c1d56b9bd635b1aca6a8e9ab1051fa9
|
[
"Apache-2.0"
] | 280
|
2016-06-27T14:46:57.000Z
|
2022-03-31T14:39:44.000Z
|
util/scripts/recparse.py
|
grzesiek2010/javarosa
|
3eb73b5c8c1d56b9bd635b1aca6a8e9ab1051fa9
|
[
"Apache-2.0"
] | 10
|
2016-07-19T05:53:07.000Z
|
2020-10-28T22:02:42.000Z
|
import struct
from datetime import datetime
#TODO: if there is an error when deserializing the record, would be VERY nice to return the partial
#deserialization of the record up to that point
class Datum:
def __init__ (self, type, val):
self.type = type
self.val = val
def __repr__ (self):
return self.pretty_print(suppress_start_indent=True, suppress_end_newline=True)
def pretty_print (self, indent=0, suppress_start_indent=False, suppress_end_newline=False):
return self._pretty_print(indent, suppress_start_indent) + ('\n' if not suppress_end_newline else '')
def _pretty_print (self, indent, suppress_start_indent=False):
buf = ''
IND = ' ' * indent
if not suppress_start_indent:
buf += IND
if self.type in ('int', 'dbl', 'bool', 'str', 'date', 'bytes', 'generic', 'error'):
prefix = {'int': 'i', 'dbl': 'f', 'bool': 'b', 'str': 's', 'date': 'd', 'bytes': 'x', 'generic': '?', 'error': '!'}[self.type]
if self.val != None:
if self.type == 'int':
sval = '%d' % self.val
elif self.type == 'dbl':
sval = '%f' % self.val
elif self.type == 'bool':
sval = ('true' if self.val else 'false')
elif self.type == 'str' or self.type == 'bytes':
sval = repr(self.val)
elif self.type == 'date':
sval = self.val.strftime('%Y-%m-%d %H:%M:%S')
elif self.type == 'error':
sval = '#%d [%s]' % (len(self.val), tohex(self.val))
else:
sval = '<null>'
buf += '%s %s' % (prefix, sval)
elif self.type in ('seq', 'list', 'map') or self.type.startswith('obj:'):
_print_element = lambda e: e._pretty_print(indent + 1)
_print_mapping = lambda (k, v): k._pretty_print(indent + 1) + ' => ' + v._pretty_print(indent + 1, True)
def _iteritems_sorted (map):
for k in sorted(map.keys(), key=lambda datum: datum.val):
yield (k, map[k])
if self.type == 'seq':
config = (True, '()', lambda x: x, _print_element)
elif self.type.startswith('obj:'):
config = (False, '()', lambda x: x, _print_element)
elif self.type == 'list':
config = (True, '[]', lambda x: x, _print_element)
elif self.type == 'map':
config = (True, '{}', _iteritems_sorted, _print_mapping)
(show_count, brackets, iterator, print_elem) = config
buf += self.type + ' '
if self.val != None:
if show_count:
buf += '#%d ' % len(self.val)
buf += brackets[0]
if len(self.val) > 0:
buf += '\n'
for (i, e) in enumerate(iterator(self.val)):
buf += print_elem(e)
if i < len(self.val) - 1:
buf += ','
buf += '\n'
buf += IND
else:
buf += ' '
buf += brackets[1]
else:
buf += '<null>'
return buf
class Type:
def __init__ (self, base, params):
if base.startswith('obj:'):
self.custom = True
self.base = base[4:]
if self.base == '':
raise ValueError('custom object type not specified')
else:
self.custom = False
self.base = base
self.params = params
self.validate()
def basename (self):
return ('obj:' if self.custom else '') + self.base
def validate (self):
allowed = {
'int': 0, 'bool': 0, 'dbl': 0, 'str': 0, 'date': 0, 'bytes': 0,
'obj': 0, 'seq': None, 'null': 1, 'tagged': 0, 'list': 1, 'listp': 0, 'map': 2, 'mapp': 1
}
name = self.base if not self.custom else 'obj'
if name in allowed:
num_args = allowed[name]
if num_args != None and len(self.params) != num_args:
raise ValueError('wrong number of args for [%s]' % self.basename())
else:
raise ValueError('unrecognized type [%s]' % self.base)
def parse (self, stream):
return self.parse_func(stream)(*self.params)
def parse_func (self, stream):
builtin_types = {
'int': stream.read_int,
'bool': stream.read_bool,
'dbl': stream.read_float,
'str': stream.read_string,
'date': stream.read_date,
'bytes': stream.read_binary,
'null': stream.read_null,
'tagged': stream.read_tagged,
'list': stream.read_list,
'listp': stream.read_list_poly,
'map': stream.read_map,
'mapp': stream.read_map_poly,
'seq': lambda *subtypes: Datum('seq', tuple([type.parse(stream) for type in subtypes]))
}
if not self.custom:
return builtin_types[self.base]
else:
if self.base in custom_types:
parse_obj_func = custom_types[self.base]
return lambda: Datum(self.basename(), parse_obj_func(stream))
else:
                raise ValueError('unknown object type [%s]' % self.base) #TODO: propagate partial deserialization
def null_datum (self):
if self.base in ['null', 'tagged']:
basetype = 'generic'
elif self.base == 'listp':
basetype = 'list'
elif self.base == 'mapp':
basetype = 'map'
else:
basetype = self.base
return Datum(basetype, None)
def unwrap (self):
if self.base != 'seq' or len(self.params) != 1:
raise ValueError('not a single-item sequence')
return self.params[0]
class Stream:
def __init__ (self, bytes):
self.stream = self.stream_gen(bytes)
self.buffers = []
def stream_gen (self, bytes):
for b in bytes:
yield b
def read (self):
try:
b = self.stream.next()
for buffer in self.buffers:
buffer.append(b)
return b
except StopIteration:
raise self.EndOfStream([''.join(buff) for buff in reversed(self.buffers)])
def mark (self):
self.buffers.append([])
def iter (self):
try:
while True:
yield self.read()
except self.EndOfStream:
raise StopIteration
class EndOfStream (Exception):
bytes = 'not implemented'
def __init__ (self, buffers):
self.buffers = buffers
def __str__ (self):
return 'unexpected end of stream'
class DataStream (Stream):
def __init__ (self, bytes):
Stream.__init__(self, bytes)
def read (self, n=1):
return ''.join([Stream.read(self) for i in range(0, n)])
def read_int (self, require_pos=False):
(buff, c) = ([], None)
while c == None or ord(c) >= 128:
c = self.read()
buff.append(ord(c) % 128)
if buff[0] >= 64:
buff[0] -= 128
val = reduce(lambda x, y: 128 * x + y, buff)
if require_pos and val < 0:
            raise ValueError('negative integer') #TODO: propagate partial deserialization
elif len(buff) > 1:
k = len(buff) - 1
vmin = -(128**k / 2)
vmax = 128**k / 2 - 1
if val <= vmax and val >= vmin:
                raise ValueError('overlong integer encoding') #TODO: propagate partial deserialization
return Datum('int', val)
def read_string (self):
n = reduce(lambda x, y: 256 * x + y, [ord(b) for b in self.read(2)])
val = self.read(n)
try:
unicode(val, 'utf-8')
except UnicodeDecodeError:
            raise #TODO: propagate partial deserialization
return Datum('str', val)
def read_bool (self):
b = ord(self.read())
if b != 0 and b != 1:
            raise ValueError('boolean not 0x00 or 0x01') #TODO: propagate partial deserialization
return Datum('bool', b == 1)
def read_float (self):
return Datum('dbl', struct.unpack('!d', self.read(8))[0])
def read_date (self):
try:
return Datum('date', datetime.utcfromtimestamp(self.read_int().val / 1000.))
except ValueError: # out-of-range error
            raise ValueError('date ticks overflow') #TODO: propagate partial deserialization
def read_binary (self):
return Datum('bytes', self.read(self.read_int().val))
class CompoundDataStream (DataStream):
def __init__ (self, bytes):
DataStream.__init__(self, bytes)
def read_null (self, type):
if self.read_bool().val:
return type.parse(self)
else:
return type.null_datum()
def read_list (self, type):
return self._read_list(lambda: type.parse(self))
def _read_list (self, get_elem):
v = []
n = self.read_int().val
for i in range(0, n):
v.append(get_elem())
return Datum('list', v)
def read_map (self, keytype, elemtype):
return self._read_map(keytype, lambda: elemtype.parse(self))
def _read_map (self, keytype, get_elem):
m = {}
n = self.read_int().val
for i in range(0, n):
k = keytype.parse(self)
v = get_elem()
m[k] = v
return Datum('map', m)
def read_tagged (self):
return self.read_type().parse(self)
def read_type (self):
tag = self.read(4)
basetype = basetype_from_tag(tag)
if basetype == 'wrapper':
(basetype, params) = self.unwrap_type()
else:
params = []
return Type(basetype, params)
def unwrap_type (self):
subtype = self.read_int().val
if subtype == 0:
return ('null', [self.read_type()])
elif subtype == 32:
return ('list', [self.read_type()])
elif subtype == 33:
return ('listp', [])
elif subtype == 34:
self.read_bool() # 'ordered' flag
return ('map', [self.read_type(), self.read_type()])
elif subtype == 35:
self.read_bool() # 'ordered' flag
return ('mapp', [self.read_type()])
else:
            raise ValueError('unrecognized wrapper code [%d]' % subtype) #TODO: propagate partial deserialization
def read_list_poly (self):
return self._read_list(lambda: self.read_tagged())
def read_map_poly (self, keytype):
return self._read_map(keytype, lambda: self.read_tagged())
def read_compound (self, template):
return type_from_template(template).parse(self)
def read_template (self, template):
return type_list_from_template(template).parse(self)
def deserialize (bytes, template):
stream = CompoundDataStream(bytes)
obj = stream.read_compound(template)
#handle botched parsing here?
#handle extra data left over here?
#return (status, obj)
return obj
def type_from_template (template):
return type_list_from_template(template).unwrap()
def type_list_from_template (template):
return Type('seq', tuple([type_from_template_token(token) for token in tokenize(template, ',', '()')]))
def type_from_template_token (token):
if '(' in token and token[-1] != ')':
raise ValueError('extra crap after close paren')
if '(' in token:
name = token.split('(')[0]
args = list(type_list_from_template(token[token.find('(')+1:-1]).params)
else:
name = token
args = []
if len(name) == 0:
raise ValueError('empty token name')
return Type(name, args)
def tokenize (str, sep, brackets):
depth = 0
tok_start = 0
for i in range(0, len(str) + 1):
new_token = False
if i == len(str):
if depth == 0:
new_token = True
else:
raise ValueError('unbalanced brackets')
elif str[i] == sep and depth == 0:
new_token = True
elif str[i] == brackets[0]:
depth += 1
elif str[i] == brackets[1]:
depth -= 1
if depth < 0:
raise ValueError('unbalanced parens')
if new_token:
token = str[tok_start:i]
tok_start = i + 1
yield token
def parse_custom (template):
return lambda stream: stream.read_template(template).val
# relies on stream containing ONLY data for the record
def _parse_property (stream):
return (Datum('str', ''.join(list(stream.iter()))),)
def _parse_tree_child (stream):
if stream.read_bool().val:
val = stream.read_compound('obj:treeelem')
else:
val = stream.read_tagged() # if this happens, which it almost certainly won't, we almost certainly won't have the prototype registered
return (val,)
def _parse_xpath_num_lit (stream):
if stream.read_bool().val:
val = stream.read_float()
else:
val = stream.read_int()
return (val,)
def _parse_xpath_path (stream):
type = stream.read_int()
filtexpr = stream.read_compound('obj:xpath-expr-filt') if type.val == 2 else None
steps = stream.read_compound('list(obj:xpath-step)')
return (type, filtexpr, steps) if filtexpr != None else (type, steps)
def _parse_xpath_step (stream):
axis = stream.read_int()
test = stream.read_int()
if test.val == 0:
detail = stream.read_compound('obj:qname')
elif test.val == 2:
detail = stream.read_string()
elif test.val == 6:
detail = stream.read_compound('null(str)')
else:
detail = None
preds = stream.read_compound('listp')
return (axis, test, detail, preds) if detail != None else (axis, test, preds)
custom_types = {
'rmsinfo': parse_custom('int,int,int'),
'recloc': parse_custom('int,int'),
'user': parse_custom('str,str,int,str,bool,map(str,str)'),
'case': parse_custom('str,str,str,str,bool,null(date),int,mapp(str)'),
'patref': parse_custom('str,date,date,str,str,int,bool'),
'formdef': parse_custom('int,str,null(str),listp,obj:forminst,null(obj:loclzr),list(obj:condition),list(obj:recalc),listp'),
'qdef': parse_custom('int,null(str),null(str),null(str),null(str),null(str),null(str),null(str),int,list(obj:selchoice),null(tagged)'),
'selchoice': parse_custom('bool,str,str'),
'gdef': parse_custom('int,tagged,null(str),null(str),null(str),null(str),bool,listp,bool,null(tagged)'),
'loclzr': parse_custom('bool,bool,map(str,listp),list(str),null(str),null(str)'),
'resfiledatasrc': parse_custom('str'),
'localedatasrc': parse_custom('map(str,str)'),
'condition': parse_custom('tagged,obj:treeref,list(obj:treeref),int,int'),
'recalc': parse_custom('tagged,obj:treeref,list(obj:treeref)'),
'treeref': parse_custom('int,list(str),list(int)'),
'forminst': parse_custom('int,int,null(str),null(str),null(date),map(str,str),obj:treeelem'),
# 'forminst-compact': ..., oh boy...
'treeelem': parse_custom('str,int,bool,null(tagged),null(list(obj:treechildpoly)),int,bool,bool,bool,bool,bool,null(obj:constraint),str,str,list(str)'),
'treechildpoly': _parse_tree_child,
'intdata': parse_custom('int'),
'booldata': parse_custom('bool'),
'strdata': parse_custom('str'),
'selonedata': parse_custom('obj:sel'),
'selmultidata': parse_custom('list(obj:sel)'),
'sel': parse_custom('str,int'),
'floatdata': parse_custom('dbl'),
'datedata': parse_custom('date'),
'datetimedata': parse_custom('date'),
'timedata': parse_custom('date'),
'constraint': parse_custom('tagged,str'),
'xpathcond': parse_custom('tagged'),
'xpathref': parse_custom('str,obj:treeref'),
'xpath-expr-arith': parse_custom('int,tagged,tagged'),
'xpath-expr-bool': parse_custom('int,tagged,tagged'),
'xpath-expr-cmp': parse_custom('int,tagged,tagged'),
'xpath-expr-eq': parse_custom('bool,tagged,tagged'),
'xpath-expr-filt': parse_custom('tagged,listp'),
'xpath-expr-func': parse_custom('obj:qname,listp'),
'xpath-expr-numlit': _parse_xpath_num_lit,
'xpath-expr-numneg': parse_custom('tagged'),
'xpath-expr-path': _parse_xpath_path,
'xpath-expr-strlit': parse_custom('str'),
'xpath-expr-union': parse_custom('tagged,tagged'),
'xpath-expr-varref': parse_custom('obj:qname'),
'xpath-step': _parse_xpath_step,
'qname': parse_custom('null(str),str'),
'property': _parse_property,
'txmsg': parse_custom('tagged'),
'simplehttptxmsg': parse_custom('str,int,str,int,str,date,date,int,int,str,int,str,bytes'),
'logentry': parse_custom('date,str,str'),
'cc-recd-forms-mapping': parse_custom('list(int),map(int,int)')
}
def basetype_from_tag (tag):
type_tags = {
'\xff\xff\xff\xff': 'wrapper',
'\xe5\xe9\xb5\x92': 'generic', #object -- should never be encountered
'\x7c\xa1\x6f\xdb': 'int',
'\x8a\xc5\x87\x0b': 'int', #long
'\xb5\xdc\x2e\x41': 'int', #short
'\x03\x3e\xb3\x91': 'int', #byte
'\x58\x4b\x12\x84': 'char',
'\xe4\xf9\xf9\xae': 'bool',
'\xc9\x83\xee\x7b': 'dbl', #float
'\x8e\xa8\x96\x89': 'dbl',
'\x42\xc2\x5b\xe3': 'str',
'\xc5\x1d\xfd\xa6': 'date',
'\x27\x51\x2e\xc9': 'obj:qdef',
'\xb3\xc4\x9b\xbd': 'obj:gdef',
'\x68\xc2\xaf\xad': 'obj:intdata',
'\x8f\x4b\x45\xfe': 'obj:booldata',
'\xed\xce\xd1\xce': 'obj:geodata',
'\x02\x6f\x56\x15': 'obj:strdata',
'\x29\xd7\x1a\x40': 'obj:selonedata',
'\xf7\x30\xcc\x7d': 'obj:selmultidata',
'\x4e\x52\xe2\x15': 'obj:floatdata',
'\x51\x0e\x1e\x6e': 'obj:datedata',
'\x6f\x87\x88\xa7': 'obj:datetimedata',
'\x68\x4e\x4e\x2e': 'obj:timedata',
'\x2b\xf7\x1a\xcb': 'obj:ptrdata',
'\xec\xa8\xec\xde': 'obj:multiptrdata',
'\xef\x74\x56\x54': 'obj:basicdataptr',
'\xf3\x06\x34\x28': 'obj:xpath-expr-arith',
'\xf6\xe4\xb9\xaf': 'obj:xpath-expr-bool',
'\x91\x2e\xfc\xee': 'obj:xpath-expr-cmp',
'\x65\x71\x6e\x97': 'obj:xpath-expr-eq',
'\xe7\x68\xb3\x6d': 'obj:xpath-expr-filt',
'\x67\x44\xc2\x7e': 'obj:xpath-expr-func',
'\x17\xe0\x31\x27': 'obj:xpath-expr-numlit',
'\x35\x60\xa2\x3b': 'obj:xpath-expr-numneg',
'\xfc\x87\x51\x53': 'obj:xpath-expr-path',
'\xef\x45\x98\x8f': 'obj:xpath-expr-strlit',
'\xff\x82\x5b\x62': 'obj:xpath-expr-union',
'\xf9\x4b\xf7\xa8': 'obj:xpath-expr-varref',
'\x5c\x57\xbb\x5e': 'obj:xpathref',
'\x5e\x88\x11\xfe': 'obj:xpathcond',
'\xf4\xaa\xb2\xe9': 'obj:resfiledatasrc',
'\xf6\xc7\x83\x5c': 'obj:localedatasrc',
'\x27\x53\xac\x23': 'obj:simplehttptxmsg',
'\x01\x12\x89\x43': 'obj:smstxmsg',
'\x21\x71\xd6\x5d': 'obj:binsmstxmsg',
# '\xed\x09\xe3\x8e': 'obj:forminst', #unused i think
# '\xfb\x2c\xa2\x76': 'obj:txmsgserwrapper' #unused i think
}
if tag in type_tags:
basetype = type_tags[tag]
if basetype == 'generic':
raise ValueError("'generic' type tag should never show up in practice")
return basetype
else:
raise ValueError("no type known for tag %s" % tohex(tag)) #TODO: propogate partial deserialization
def hexinput (hexstr):
return ''.join([chr(int(c, 16)) for c in hexstr.split()])
def tohex (bytes):
return ' '.join(['%02x' % ord(b) for b in bytes])
| 34.207339
| 155
| 0.600654
|
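A worked example of the wire format `recparse.py` reads (Python 2 only: the module relies on `unicode`, the builtin `reduce`, and tuple-parameter lambdas). The bytes are hand-assembled to match `read_int` (base-128 varint) and `read_string` (two-byte big-endian length prefix); note that `deserialize` expects a single type, so compound payloads go through `seq(...)`:
# Python 2, with recparse.py importable:
from recparse import deserialize, hexinput

print deserialize(hexinput('05'), 'int')                      # i 5
print deserialize(hexinput('00 02 68 69'), 'str')             # s 'hi'
print deserialize(hexinput('05 00 02 68 69'), 'seq(int,str)') # seq of (i 5, s 'hi')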
9acba265333cb06442cf3ac8c7f6c48334754143
| 4,122
|
py
|
Python
|
tests/test_authorized.py
|
kids-first/kf-api-study-creator
|
93a79b108b6474f9b4135ace06c89ddcf63dd257
|
[
"Apache-2.0"
] | 3
|
2019-05-04T02:07:28.000Z
|
2020-10-16T17:47:44.000Z
|
tests/test_authorized.py
|
kids-first/kf-api-study-creator
|
93a79b108b6474f9b4135ace06c89ddcf63dd257
|
[
"Apache-2.0"
] | 604
|
2019-02-21T18:14:51.000Z
|
2022-02-10T08:13:54.000Z
|
tests/test_authorized.py
|
kids-first/kf-api-study-creator
|
93a79b108b6474f9b4135ace06c89ddcf63dd257
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from django.contrib.auth import get_user_model
from creator.studies.models import Membership
from creator.studies.factories import StudyFactory
from creator.files.factories import FileFactory
User = get_user_model()
@pytest.fixture
def versions(db, clients, mocker):
client = clients.get("Administrators")
study = StudyFactory()
file = FileFactory(study=study)
version = file.versions.latest("created_at")
version.key = open(f"tests/data/manifest.txt")
mock_resp = mocker.patch("creator.files.views._resolve_version")
mock_resp.return_value = (file, version)
return study, file, version
@pytest.mark.parametrize("resource", ["study", "file", "version"])
@pytest.mark.parametrize(
"user_group,allowed",
[
("Administrators", True),
("Services", True),
("Developers", True),
("Investigators", True),
("Bioinformatics", True),
(None, False),
],
)
def test_get_resource_by_id(
db, clients, versions, resource, user_group, allowed
):
"""
Test that resource may be retrieved by (relay) id
- Should return resource if admin
- Should return resource if user who is part of study
- Should return None if user who is not part of study
- Should return None if not an authenticated user
"""
# Select client based on user type
admin_client = clients.get("Administrators")
client = clients.get(user_group)
study, file, version = versions
user = User.objects.filter(groups__name=user_group).first()
Membership(collaborator=user, study=study).save()
# Get the id of the resource we're testing for
kf_id = {
"study": study.kf_id,
"file": file.kf_id,
"version": version.kf_id,
}[resource]
# Get a node's relay id using admin client
query = f'{{{resource}ByKfId(kfId: "{kf_id}") {{ id }} }}'
resp = admin_client.post(
"/graphql", data={"query": query}, content_type="application/json"
)
assert "id" in resp.json()["data"][f"{resource}ByKfId"]
node_id = resp.json()["data"][f"{resource}ByKfId"]["id"]
# Now try to get node by the relay id
query = f'{{{resource}(id: "{node_id}") {{ id }} }}'
resp = client.post(
"/graphql", data={"query": query}, content_type="application/json"
)
# Should get back the node with id if expected, None if not
if allowed:
assert resp.json()["data"][resource]["id"] == node_id
else:
assert resp.json()["errors"][0]["message"] == "Not allowed"
@pytest.mark.parametrize("resource", ["study", "file", "version"])
@pytest.mark.parametrize(
"user_group,allowed",
[
("Administrators", True),
("Services", True),
("Developers", True),
("Investigators", True),
("Bioinformatics", True),
(None, False),
],
)
def test_get_resource_by_kf_id(
db, clients, versions, resource, user_group, allowed
):
"""
Test that resource may be retrieved by kfId
- Will return resource if admin
    - Should return resource if user who is part of study
- Should return None if user who is not part of study
- Should return None if not an authenticated user
"""
# Select client based on user type
client = clients.get(user_group)
study, file, version = versions
user = User.objects.filter(groups__name=user_group).first()
Membership(collaborator=user, study=study).save()
# Get the id of the resource we're testing for
kf_id = {
"study": study.kf_id,
"file": file.kf_id,
"version": version.kf_id,
}[resource]
    # Test that the resource may be retrieved by kf_id
query = f'{{{resource}ByKfId(kfId: "{kf_id}") {{ id kfId }} }}'
resp = client.post(
"/graphql", data={"query": query}, content_type="application/json"
)
assert f"{resource}ByKfId" in resp.json()["data"]
    # Will return the kfId if allowed, an error if not
if allowed:
assert "kfId" in resp.json()["data"][f"{resource}ByKfId"]
else:
assert resp.json()["errors"][0]["message"] == "Not allowed"
| 32.203125
| 74
| 0.640708
|
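The relay `id` round-trip in the first test above works because graphene's default relay node IDs are just base64 of "TypeName:key"; a sketch of minting one directly (both the type name `StudyNode` and the kf_id are guesses for illustration, check the API's schema):
from base64 import b64encode

def to_global_id(type_name, key):
    # graphene-style relay global id: base64("TypeName:key")
    return b64encode(f"{type_name}:{key}".encode()).decode()

node_id = to_global_id("StudyNode", "SD_00000000")  # usable as `id` in the query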
7f997142897fab82f79b1141697eed4a1abb9ce6
| 14,400
|
py
|
Python
|
senha_main.py
|
Joao-vap/Jogo_senha-em-Python
|
c77f0de177eb6b98341da6da0a3f774cdde4be81
|
[
"MIT"
] | null | null | null |
senha_main.py
|
Joao-vap/Jogo_senha-em-Python
|
c77f0de177eb6b98341da6da0a3f774cdde4be81
|
[
"MIT"
] | null | null | null |
senha_main.py
|
Joao-vap/Jogo_senha-em-Python
|
c77f0de177eb6b98341da6da0a3f774cdde4be81
|
[
"MIT"
] | null | null | null |
import pygame
from pygame import display, event
from pygame import image, transform
import os
import random
from time import sleep
DIM_TELA_Y = 512
DIM_TELA_X = 1024
DIM_INICIAR = [256, 64]
DIM_INST = [138, 50]
BOTOES = [x for x in os.listdir('cores') if x[-3:].lower() == 'png']
rodando = True
injogo = True
# defining the secret code: four digits, each in 1..6
senha = []
for n in range(4):
x = random.randint(1, 6)
senha.append(x)
# initializing the game window
pygame.init()
display.set_caption("Senha")
tela = display.set_mode((DIM_TELA_X, DIM_TELA_Y))
inicial = transform.scale(image.load('Tela_inicial/Fundoinicial.png'), (DIM_TELA_X, DIM_TELA_Y))
botao_init = transform.scale(image.load('Tela_inicial/iniciar.png'), DIM_INICIAR)
botao_inst = transform.scale(image.load('Tela_inicial/instrucoes.png'), DIM_INST)
coord_init = [(DIM_TELA_X / 2) - 128, (DIM_TELA_Y / 2) + 34]
coord_inst = [30, 450]
coord_voltar = coord_inst
tela.blit(inicial, (0, 0))
tela.blit(botao_init, coord_init)
tela.blit(botao_inst, coord_inst)
def checar_iniciar(x, y):
if coord_init[0] < x < (coord_init[0] + DIM_INICIAR[0]):
if coord_init[1] < y < (coord_init[1] + DIM_INICIAR[1]):
return True
else:
return False
def checar_inst(x, y):
if coord_inst[0] < x < (coord_inst[0] + DIM_INST[0]):
        if coord_inst[1] < y < (coord_inst[1] + DIM_INST[1]):
return True
else:
return False
def checar_voltar(x, y):
    if coord_voltar[0] < x < (coord_voltar[0] + DIM_INST[0]):
        if coord_voltar[1] < y < (coord_voltar[1] + DIM_INST[1]):
return True
else:
return False
def checar_respostas(localidade):
vetorsenha = [0, 0, 0, 0, 0, 0]
acertos = []
for w in senha:
vetorsenha[w - 1] += 1
for numero in range(4):
if len(localidade[rodada]) < 4:
for amenos in range(4 - len(localidade[rodada])):
localidade[rodada].append(0)
for n in range(4):
if localidade[rodada][n] == senha[n]:
acertos.append(1)
vetorsenha[localidade[rodada][n] - 1] = vetorsenha[localidade[rodada][n] - 1] - 1
for m in range(4):
for x in range(4):
if localidade[rodada][m] == senha[x] and vetorsenha[localidade[rodada][m] - 1] != 0 and localidade[rodada][m] != senha[m]:
acertos.append(0)
vetorsenha[localidade[rodada][m] - 1] = vetorsenha[localidade[rodada][m] - 1] - 1
break
zeros = len(acertos) - sum(acertos)
s = sum(acertos)
return [zeros, s]
def atualizar_telas_certos(n):
acertos = transform.scale(image.load('acertos/' + str(n[0]) + str(n[1]) + '.png'), (50, 50))
tela.blit(acertos, (50 + (rodada * 86), 330))
display.flip()
def partytime():
felps = transform.scale(image.load('acertos/felipe.png'), (120, 60))
for x in range(20):
for z in range(15):
tela.blit(felps, (x * 95, 40 * z))
display.flip()
sleep(3)
setup_tabuleiro()
def setup_tabuleiro():
tela_jogo = transform.scale(image.load('Tela_inicial/tela_jogo.png'), (DIM_TELA_X, DIM_TELA_Y))
tela.blit(tela_jogo, (0, 0))
lista = ['testar.png', 'apagar.png', 'Voltar_inst.png']
for botao in BOTOES:
lista.append(botao)
contador_botoes = [320, 420]
indices = [0, 0, 0, 0, 0, 0, 0, 0, 0]
i = 1
index = 0
outros = False
for item in lista:
if 0 == (i % 4):
outros = True
contador_botoes = [0, 435]
i = 1
if outros:
bot = transform.scale(image.load('cores/' + item), (40, 40))
contador_botoes[0] = contador_botoes[0] + 50
tela.blit(bot, (contador_botoes[0], contador_botoes[1]))
indices[index] = [[contador_botoes[0], contador_botoes[0] + 40],
[contador_botoes[1], contador_botoes[1] + 40]]
i = 1
else:
botao = transform.scale(image.load('Tela_inicial/' + item), (150, 60))
contador_botoes[0] = contador_botoes[0] + 160
tela.blit(botao, (contador_botoes[0], contador_botoes[1]))
indices[index] = [[contador_botoes[0], contador_botoes[0] + 150],
[contador_botoes[1], contador_botoes[1] + 60]]
i += 1
index += 1
display.flip()
def mostrar():
Aurelio = ['verde', 'roxo', 'amarelo', 'azul', 'vermelho', 'cinza']
apag = transform.scale(image.load('Tela_inicial/Apagar.png'), (50, 250))
tela.blit(apag, (930, 40))
display.flip()
for n in range(len(senha)):
correto = transform.scale(image.load('cores/' + Aurelio[senha[n]-1] + '.png'), (40, 40))
tela.blit(correto, (930, 50 + (65 * n)))
display.flip()
sleep(3)
setup_tabuleiro()
# main loop
while rodando:
evento_rodada = event.get()
for e in evento_rodada:
if e.type == pygame.QUIT:
rodando = False
if e.type == pygame.MOUSEBUTTONDOWN:
mouse_x, mouse_y = pygame.mouse.get_pos()
if checar_iniciar(mouse_x, mouse_y):
jogando = True
tela_jogo = transform.scale(image.load('Tela_inicial/tela_jogo.png'), (DIM_TELA_X, DIM_TELA_Y))
tela.blit(tela_jogo, (0, 0))
lista = ['testar.png', 'apagar.png', 'Voltar_inst.png']
for botao in BOTOES:
lista.append(botao)
contador_botoes = [320, 420]
indices = [0, 0, 0, 0, 0, 0, 0, 0, 0]
i = 1
index = 0
outros = False
for item in lista:
if 0 == (i % 4):
outros = True
contador_botoes = [0, 435]
i = 1
if outros:
bot = transform.scale(image.load('cores/' + item), (40, 40))
contador_botoes[0] = contador_botoes[0] + 50
tela.blit(bot, (contador_botoes[0], contador_botoes[1]))
indices[index] = [[contador_botoes[0], contador_botoes[0] + 40],
[contador_botoes[1], contador_botoes[1] + 40]]
i = 1
else:
botao = transform.scale(image.load('Tela_inicial/' + item), (150, 60))
contador_botoes[0] = contador_botoes[0] + 160
tela.blit(botao, (contador_botoes[0], contador_botoes[1]))
indices[index] = [[contador_botoes[0], contador_botoes[0] + 150],
[contador_botoes[1], contador_botoes[1] + 60]]
i += 1
index += 1
display.flip()
while jogando:
localidade = []
for i in range(10):
l = [0]
localidade.append(l)
out = False
for rodada in range(10):
localidade[rodada] = []
if out:
break
ciclo = True
while ciclo:
evento_rodada = event.get()
for e in evento_rodada:
if e.type == pygame.QUIT:
rodando = False
jogando = False
injogo = False
ciclo = False
out = True
break
if e.type == pygame.MOUSEBUTTONDOWN:
mouse_x, mouse_y = pygame.mouse.get_pos()
qual = 1
for item in indices:
if item[0][0] < mouse_x < item[0][1]:
if item[1][0] < mouse_y < item[1][1]:
if qual == 1:
ciclo = False
break
elif qual == 2:
apag = transform.scale(image.load('Tela_inicial/Apagar.png'), (30, 240))
tela.blit(apag, (60 + (rodada * 86), 50))
localidade[rodada].append(1)
display.flip()
localidade[rodada] = []
elif qual == 3:
jogando = False
ciclo = False
out = True
break
elif qual == 4:
if len(localidade[rodada]) < 4:
ver = transform.scale(image.load('cores/verde.png'), (30, 30))
tela.blit(ver, (60 + (rodada * 86), 50 + (len(localidade[rodada]) * 65)))
localidade[rodada].append(1)
display.flip()
elif qual == 5:
if len(localidade[rodada]) < 4:
ver = transform.scale(image.load('cores/roxo.png'), (30, 30))
tela.blit(ver, (60 + (rodada * 86), 50 + (len(localidade[rodada]) * 65)))
localidade[rodada].append(2)
display.flip()
elif qual == 6:
if len(localidade[rodada]) < 4:
ver = transform.scale(image.load('cores/amarelo.png'), (30, 30))
tela.blit(ver, (60 + (rodada * 86), 50 + (len(localidade[rodada]) * 65)))
localidade[rodada].append(3)
display.flip()
elif qual == 7:
if len(localidade[rodada]) < 4:
ver = transform.scale(image.load('cores/azul.png'), (30, 30))
tela.blit(ver, (60 + (rodada * 86), 50 + (len(localidade[rodada]) * 65)))
localidade[rodada].append(4)
display.flip()
elif qual == 8:
if len(localidade[rodada]) < 4:
ver = transform.scale(image.load('cores/vermelho.png'), (30, 30))
tela.blit(ver, (60 + (rodada * 86), 50 + (len(localidade[rodada]) * 65)))
localidade[rodada].append(5)
display.flip()
else:
if len(localidade[rodada]) < 4:
ver = transform.scale(image.load('cores/cinza.png'), (30, 30))
tela.blit(ver, (60 + (rodada * 86), 50 + (len(localidade[rodada]) * 65)))
localidade[rodada].append(6)
display.flip()
qual += 1
if not ciclo:
n = checar_respostas(localidade)
atualizar_telas_certos(n)
if len(localidade[rodada]) < 4:
ciclo = True
if n[1] == 4:
partytime()
break
if rodada == 9:
mostrar()
display.flip()
if rodando:
tela.blit(inicial, (0, 0))
tela.blit(botao_init, coord_init)
tela.blit(botao_inst, coord_inst)
if checar_inst(mouse_x, mouse_y) and injogo:
instrucao = True
inst_texto = transform.scale(image.load('Tela_inicial/instrucoes_texto.png'), (DIM_TELA_X, DIM_TELA_Y))
voltar_bot = transform.scale(image.load('Tela_inicial/Voltar_inst.png'), DIM_INST)
tela.blit(inst_texto, (0, 0))
tela.blit(voltar_bot, coord_voltar)
display.flip()
while instrucao:
evento_rodada = event.get()
for e in evento_rodada:
if e.type == pygame.QUIT:
instrucao = False
rodando = False
if e.type == pygame.MOUSEBUTTONDOWN:
mouse_x, mouse_y = pygame.mouse.get_pos()
if checar_voltar(mouse_x, mouse_y):
instrucao = False
tela.blit(inicial, (0, 0))
tela.blit(botao_init, coord_init)
tela.blit(botao_inst, coord_inst)
display.flip()
print('Obrigado por jogar!')
| 37.894737
| 134
| 0.416597
|
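`checar_respostas` above is Mastermind scoring: count exact (right colour, right slot) matches, then right-colour-wrong-slot matches without reusing pegs. A compact reference version of the same counting, using a multiset instead of the `vetorsenha` bookkeeping (a sketch, not a drop-in replacement; the original returns `[misplaced, exact]`):
from collections import Counter

def score(guess, secret):
    exact = sum(g == s for g, s in zip(guess, secret))
    # multiset intersection counts colour matches regardless of position
    overlap = sum((Counter(guess) & Counter(secret)).values())
    return exact, overlap - exact

print(score([1, 2, 3, 4], [4, 3, 2, 1]))  # (0, 4): all colours right, all misplaced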
2dfe8c9b4971547ea426c5fd3db92d9a6cf3575b
| 4,063
|
py
|
Python
|
src/name.py
|
silent1mezzo/lightsaber
|
e470be7fb84b810fe846ff0ede78d06bf69cd5e3
|
[
"MIT"
] | 13
|
2020-08-12T12:04:19.000Z
|
2022-03-12T03:53:07.000Z
|
src/name.py
|
silent1mezzo/lightsaber
|
e470be7fb84b810fe846ff0ede78d06bf69cd5e3
|
[
"MIT"
] | 46
|
2020-09-03T06:00:18.000Z
|
2022-03-25T10:03:53.000Z
|
src/name.py
|
silent1mezzo/lightsaber
|
e470be7fb84b810fe846ff0ede78d06bf69cd5e3
|
[
"MIT"
] | 3
|
2021-08-11T19:12:37.000Z
|
2021-11-09T15:19:59.000Z
|
NAMES = [
"Sistry Harper",
"Montileu Parlayy",
"Anna Orion",
"Baniss Woodle",
"Leten Swiftflight",
"Dre Folen",
"Aslik Zhagel",
"Andros Jien",
"Sahdra Chan",
"Saalia Sunfell",
"Lya Cenvax",
"Jauhzmynn Dymos",
"Branada Picus",
"Faris Hentz",
"Cora Josto",
"Riyan Norduin",
"Eranas Aralun",
"Riaca Coven",
"Cuhan Ackdool",
"Gav A'kazz",
"Cid Sunfell",
"Tobias Hemor",
"Harloth Voss",
"Iago Samick",
"Torin Mald",
"Leran Lankost",
"Raena Kyle",
"Gerd Bardok",
"Tarja Teraah",
"Myn Marr",
"Leia Felan",
"Leran Prine",
"Miranda Prine",
"Sena Saxon",
"Alejandro Tavik",
"Kavi Krieg",
"Mulokhai Lefex",
"Zam Reu Lai",
"Irman Driet",
"Aerith Tyree",
"Gaetana Morker",
"Ward Roosh",
"Hoosha Ivalice",
"Malius Leven",
"Crale Sato",
"Adram Starkos",
"Adoniram Brower",
"Allen Maider",
"Tavion Tierney",
"Darius Foreman",
"Lynorri Drayven",
"Graeme Adras",
"Latia Chias",
"Attila Cee",
"Monte Hethna",
"Nuoak Pareja",
"Shandra Chode",
"Dorn Hennis",
"Markus Ossaki",
"Odona Olreb",
"Chaz Juall",
"Rance Kalranoos",
"Azul Raeth",
"Arnis Sinite",
"Tam Drakar",
"Hiwatta Sol",
"Ral Daagh",
"Raja Herand",
"Dorian Uhdea",
"Sen Nere",
"Obdulla Li Anek",
"Tash Tershin",
"Dayana Peoly",
"Victor Toarinar",
"Marion Serat",
"Fias Irokini",
"Keshani Terallo",
"Lobacca Daklan",
"Dylan Doha",
"Zeb Soduntis",
"Hopp Zamba",
"Fallah Mattac",
"Braxis Tanoor",
"Deel Pavish",
"Lania Nabe",
"Jephe Kovani",
"Mayli Diruno",
"Diego Prine",
"Mowha Cavi",
"FidFidFidpiuar",
"Taima Ryen",
"Numm Aldan",
"Naria Chupa-Pau",
"Rhel Malo",
"Tanaris Latt",
"Rubuta Myrishi",
"Pom Qorbin",
"Duke Ferndike",
"Wile Lin",
"Luna Tsomcren",
"Riaca Vest",
"Gyra Ananillka",
"Plo Deenia",
"Herron Sekel",
"Vansic Webb",
"Katalana Goldenbur",
"Elias Gosular",
"Horchoba Cynthisa",
"Zeta Chanden",
"Tormax Solon",
"Melfina Soto",
"Simone Natas",
"Dylan Lerann",
"Dashara Endel",
"Nuoak Leqarna",
"Guld Norduin",
"Aalto Archer",
"Larek Myrishi",
"Nora Hunew",
"Rayf Pic",
"Fable Gadreel",
"Tana Moreland",
"Tac Mattac",
"Lannera Sherrol",
"Alysia Brock",
"Zhymmia Renz",
"Zef Zendu",
"Kymber Secura",
"Elayne Zelona",
"Cid Serat",
"Arathilion Pavish",
"Van Rekkon",
"Rel Kast",
"Leo Prower",
"Iving Onasi",
"Allada Yalawari",
"Banol Nidor",
"Fias Ragnos",
"Aaron Drea",
"Erisi Smague",
"Mohebbi Roscoe",
"Shri-Lan Roscoe",
"Rosh Deece",
"Gorden Fabiszak",
"Nuray Maddocks",
"Tisha Surtoske",
"Alaric Rabor",
"Rogmi Devin",
"Stephen Feraan",
"Aimee Firin",
"La Kai",
"Laatl Kai",
"Jem Klar",
"Teekon Oligard",
"Tavion Katana",
"Aerex Ellan",
"Nora Tahlee",
"Delmon Allerti",
"Paydon Tahlee",
"Pencron Klar",
"Fiolla Lungthyme",
"Dostoga Pryde",
"Edbr Fodao",
"Guy Janin",
"Nadriandur Sallaros",
"Syal Gestahl",
"Jix Vullen",
"Sigea Dimmias",
"Dash Dimmias",
"Pescer Vullen",
"Lesa Pryde",
"Sato Sekel",
"Ettal Sekel",
"Daniel Korpil",
"Mando Kappa",
"Tuija Kappa",
"Obath Saul",
"Titus Saul",
"Nacene Koth",
"Piodas Dabrini",
"Lersen Trenken",
"Kwyntehst Corse",
"Jillian Athan",
"Markus Svung",
"Selan Danner",
"Lizzy Tsark",
"Vella Goldenbur",
"Dax Joben",
"Zerz Fleetfire",
"Davin Hwang",
"Eleian Devin",
"Zu Guga",
"Lando Tull",
"Randyl Tarwin",
"Orus Almiston",
"Ethan Farthen",
"Marclonus Nise",
"Gant Novar",
"Jakkara Rayley",
"Strago Valentine",
"Elayne Exibil",
"Reeve Luss",
"Tak Pesqui",
]
| 19.723301
| 26
| 0.547625
|
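The record above is pure data; presumably the repo samples it at runtime, e.g. (import path assumed from the listed file path):
import random
from src.name import NAMES

print(random.choice(NAMES))  # e.g. "Leia Felan"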
406c217b2848b56756a72c1e86a54833b9f74493
| 18,751
|
py
|
Python
|
django/http/__init__.py
|
alex/django-old
|
6f964c8f03e5d25c9e36898a001c8463f82fbb81
|
[
"BSD-3-Clause"
] | 2
|
2015-11-05T06:07:13.000Z
|
2019-01-04T07:35:59.000Z
|
django/http/__init__.py
|
alex/django-old
|
6f964c8f03e5d25c9e36898a001c8463f82fbb81
|
[
"BSD-3-Clause"
] | null | null | null |
django/http/__init__.py
|
alex/django-old
|
6f964c8f03e5d25c9e36898a001c8463f82fbb81
|
[
"BSD-3-Clause"
] | null | null | null |
import datetime
import os
import re
import time
from Cookie import BaseCookie, SimpleCookie, CookieError
from pprint import pformat
from urllib import urlencode
from urlparse import urljoin
try:
# The mod_python version is more efficient, so try importing it first.
from mod_python.util import parse_qsl
except ImportError:
try:
# Python 2.6 and greater
from urlparse import parse_qsl
except ImportError:
# Python 2.5, 2.4. Works on Python 2.6 but raises
# PendingDeprecationWarning
from cgi import parse_qsl
from django.utils.datastructures import MultiValueDict, ImmutableList
from django.utils.encoding import smart_str, iri_to_uri, force_unicode
from django.utils.http import cookie_date
from django.http.multipartparser import MultiPartParser
from django.conf import settings
from django.core.files import uploadhandler
from utils import *
RESERVED_CHARS="!*'();:@&=+$,/?%#[]"
absolute_http_url_re = re.compile(r"^https?://", re.I)
class Http404(Exception):
pass
class HttpRequest(object):
"""A basic HTTP request."""
# The encoding used in GET/POST dicts. None means use default setting.
_encoding = None
_upload_handlers = []
def __init__(self):
self.GET, self.POST, self.COOKIES, self.META, self.FILES = {}, {}, {}, {}, {}
self.path = ''
self.path_info = ''
self.method = None
def __repr__(self):
return '<HttpRequest\nGET:%s,\nPOST:%s,\nCOOKIES:%s,\nMETA:%s>' % \
(pformat(self.GET), pformat(self.POST), pformat(self.COOKIES),
pformat(self.META))
def get_host(self):
"""Returns the HTTP host using the environment or request headers."""
# We try three options, in order of decreasing preference.
if 'HTTP_X_FORWARDED_HOST' in self.META:
host = self.META['HTTP_X_FORWARDED_HOST']
elif 'HTTP_HOST' in self.META:
host = self.META['HTTP_HOST']
else:
# Reconstruct the host using the algorithm from PEP 333.
host = self.META['SERVER_NAME']
server_port = str(self.META['SERVER_PORT'])
if server_port != (self.is_secure() and '443' or '80'):
host = '%s:%s' % (host, server_port)
return host
def get_full_path(self):
return ''
def build_absolute_uri(self, location=None):
"""
Builds an absolute URI from the location and the variables available in
this request. If no location is specified, the absolute URI is built on
``request.get_full_path()``.
"""
if not location:
location = self.get_full_path()
if not absolute_http_url_re.match(location):
current_uri = '%s://%s%s' % (self.is_secure() and 'https' or 'http',
self.get_host(), self.path)
location = urljoin(current_uri, location)
return iri_to_uri(location)
def is_secure(self):
return os.environ.get("HTTPS") == "on"
def is_ajax(self):
return self.META.get('HTTP_X_REQUESTED_WITH') == 'XMLHttpRequest'
def _set_encoding(self, val):
"""
Sets the encoding used for GET/POST accesses. If the GET or POST
dictionary has already been created, it is removed and recreated on the
next access (so that it is decoded correctly).
"""
self._encoding = val
if hasattr(self, '_get'):
del self._get
if hasattr(self, '_post'):
del self._post
def _get_encoding(self):
return self._encoding
encoding = property(_get_encoding, _set_encoding)
def _initialize_handlers(self):
self._upload_handlers = [uploadhandler.load_handler(handler, self)
for handler in settings.FILE_UPLOAD_HANDLERS]
def _set_upload_handlers(self, upload_handlers):
if hasattr(self, '_files'):
raise AttributeError("You cannot set the upload handlers after the upload has been processed.")
self._upload_handlers = upload_handlers
def _get_upload_handlers(self):
if not self._upload_handlers:
            # If there are no upload handlers defined, initialize them from settings.
self._initialize_handlers()
return self._upload_handlers
upload_handlers = property(_get_upload_handlers, _set_upload_handlers)
def parse_file_upload(self, META, post_data):
"""Returns a tuple of (POST QueryDict, FILES MultiValueDict)."""
self.upload_handlers = ImmutableList(
self.upload_handlers,
warning = "You cannot alter upload handlers after the upload has been processed."
)
parser = MultiPartParser(META, post_data, self.upload_handlers, self.encoding)
return parser.parse()
class QueryDict(MultiValueDict):
"""
A specialized MultiValueDict that takes a query string when initialized.
This is immutable unless you create a copy of it.
Values retrieved from this class are converted from the given encoding
(DEFAULT_CHARSET by default) to unicode.
"""
# These are both reset in __init__, but is specified here at the class
# level so that unpickling will have valid values
_mutable = True
_encoding = None
def __init__(self, query_string, mutable=False, encoding=None):
MultiValueDict.__init__(self)
if not encoding:
# *Important*: do not import settings any earlier because of note
# in core.handlers.modpython.
from django.conf import settings
encoding = settings.DEFAULT_CHARSET
self.encoding = encoding
for key, value in parse_qsl((query_string or ''), True): # keep_blank_values=True
self.appendlist(force_unicode(key, encoding, errors='replace'),
force_unicode(value, encoding, errors='replace'))
self._mutable = mutable
def _get_encoding(self):
if self._encoding is None:
# *Important*: do not import settings at the module level because
# of the note in core.handlers.modpython.
from django.conf import settings
self._encoding = settings.DEFAULT_CHARSET
return self._encoding
def _set_encoding(self, value):
self._encoding = value
encoding = property(_get_encoding, _set_encoding)
def _assert_mutable(self):
if not self._mutable:
raise AttributeError("This QueryDict instance is immutable")
def __setitem__(self, key, value):
self._assert_mutable()
key = str_to_unicode(key, self.encoding)
value = str_to_unicode(value, self.encoding)
MultiValueDict.__setitem__(self, key, value)
def __delitem__(self, key):
self._assert_mutable()
super(QueryDict, self).__delitem__(key)
def __copy__(self):
result = self.__class__('', mutable=True, encoding=self.encoding)
for key, value in dict.items(self):
dict.__setitem__(result, key, value)
return result
def __deepcopy__(self, memo):
import django.utils.copycompat as copy
result = self.__class__('', mutable=True, encoding=self.encoding)
memo[id(self)] = result
for key, value in dict.items(self):
dict.__setitem__(result, copy.deepcopy(key, memo), copy.deepcopy(value, memo))
return result
def setlist(self, key, list_):
self._assert_mutable()
key = str_to_unicode(key, self.encoding)
list_ = [str_to_unicode(elt, self.encoding) for elt in list_]
MultiValueDict.setlist(self, key, list_)
def setlistdefault(self, key, default_list=()):
self._assert_mutable()
if key not in self:
self.setlist(key, default_list)
return MultiValueDict.getlist(self, key)
def appendlist(self, key, value):
self._assert_mutable()
key = str_to_unicode(key, self.encoding)
value = str_to_unicode(value, self.encoding)
MultiValueDict.appendlist(self, key, value)
def update(self, other_dict):
self._assert_mutable()
f = lambda s: str_to_unicode(s, self.encoding)
if hasattr(other_dict, 'lists'):
for key, valuelist in other_dict.lists():
for value in valuelist:
MultiValueDict.update(self, {f(key): f(value)})
else:
d = dict([(f(k), f(v)) for k, v in other_dict.items()])
MultiValueDict.update(self, d)
def pop(self, key, *args):
self._assert_mutable()
return MultiValueDict.pop(self, key, *args)
def popitem(self):
self._assert_mutable()
return MultiValueDict.popitem(self)
def clear(self):
self._assert_mutable()
MultiValueDict.clear(self)
def setdefault(self, key, default=None):
self._assert_mutable()
key = str_to_unicode(key, self.encoding)
default = str_to_unicode(default, self.encoding)
return MultiValueDict.setdefault(self, key, default)
def copy(self):
"""Returns a mutable copy of this object."""
return self.__deepcopy__({})
def urlencode(self):
output = []
for k, list_ in self.lists():
k = smart_str(k, self.encoding)
output.extend([urlencode({k: smart_str(v, self.encoding)}) for v in list_])
return '&'.join(output)
class CompatCookie(SimpleCookie):
"""
Cookie class that handles some issues with browser compatibility.
"""
def value_encode(self, val):
# Some browsers do not support quoted-string from RFC 2109,
# including some versions of Safari and Internet Explorer.
# These browsers split on ';', and some versions of Safari
# are known to split on ', '. Therefore, we encode ';' and ','
# SimpleCookie already does the hard work of encoding and decoding.
# It uses octal sequences like '\\012' for newline etc.
# and non-ASCII chars. We just make use of this mechanism, to
# avoid introducing two encoding schemes which would be confusing
# and especially awkward for javascript.
# NB, contrary to Python docs, value_encode returns a tuple containing
# (real val, encoded_val)
val, encoded = super(CompatCookie, self).value_encode(val)
encoded = encoded.replace(";", "\\073").replace(",","\\054")
# If encoded now contains any quoted chars, we need double quotes
# around the whole string.
if "\\" in encoded and not encoded.startswith('"'):
encoded = '"' + encoded + '"'
return val, encoded
def parse_cookie(cookie):
if cookie == '':
return {}
if not isinstance(cookie, BaseCookie):
try:
c = CompatCookie()
c.load(cookie)
except CookieError:
# Invalid cookie
return {}
else:
c = cookie
cookiedict = {}
for key in c.keys():
cookiedict[key] = c.get(key).value
return cookiedict
class BadHeaderError(ValueError):
pass
class HttpResponse(object):
"""A basic HTTP response, with content and dictionary-accessed headers."""
status_code = 200
def __init__(self, content='', mimetype=None, status=None,
content_type=None):
# _headers is a mapping of the lower-case name to the original case of
# the header (required for working with legacy systems) and the header
# value. Both the name of the header and its value are ASCII strings.
self._headers = {}
self._charset = settings.DEFAULT_CHARSET
if mimetype:
content_type = mimetype # For backwards compatibility
if not content_type:
content_type = "%s; charset=%s" % (settings.DEFAULT_CONTENT_TYPE,
self._charset)
if not isinstance(content, basestring) and hasattr(content, '__iter__'):
self._container = content
self._is_string = False
else:
self._container = [content]
self._is_string = True
self.cookies = CompatCookie()
if status:
self.status_code = status
self['Content-Type'] = content_type
def __str__(self):
"""Full HTTP message, including headers."""
return '\n'.join(['%s: %s' % (key, value)
for key, value in self._headers.values()]) \
+ '\n\n' + self.content
def _convert_to_ascii(self, *values):
"""Converts all values to ascii strings."""
for value in values:
if isinstance(value, unicode):
try:
value = value.encode('us-ascii')
except UnicodeError, e:
e.reason += ', HTTP response headers must be in US-ASCII format'
raise
else:
value = str(value)
if '\n' in value or '\r' in value:
raise BadHeaderError("Header values can't contain newlines (got %r)" % (value))
yield value
def __setitem__(self, header, value):
header, value = self._convert_to_ascii(header, value)
self._headers[header.lower()] = (header, value)
def __delitem__(self, header):
try:
del self._headers[header.lower()]
except KeyError:
pass
def __getitem__(self, header):
return self._headers[header.lower()][1]
def has_header(self, header):
"""Case-insensitive check for a header."""
return self._headers.has_key(header.lower())
__contains__ = has_header
def items(self):
return self._headers.values()
def get(self, header, alternate):
return self._headers.get(header.lower(), (None, alternate))[1]
def set_cookie(self, key, value='', max_age=None, expires=None, path='/',
domain=None, secure=False):
"""
Sets a cookie.
``expires`` can be a string in the correct format or a
``datetime.datetime`` object in UTC. If ``expires`` is a datetime
object then ``max_age`` will be calculated.
"""
self.cookies[key] = value
if expires is not None:
if isinstance(expires, datetime.datetime):
delta = expires - expires.utcnow()
# Add one second so the date matches exactly (a fraction of
# time gets lost between converting to a timedelta and
# then the date string).
delta = delta + datetime.timedelta(seconds=1)
# Just set max_age - the max_age logic will set expires.
expires = None
max_age = max(0, delta.days * 86400 + delta.seconds)
else:
self.cookies[key]['expires'] = expires
if max_age is not None:
self.cookies[key]['max-age'] = max_age
# IE requires expires, so set it if hasn't been already.
if not expires:
self.cookies[key]['expires'] = cookie_date(time.time() +
max_age)
if path is not None:
self.cookies[key]['path'] = path
if domain is not None:
self.cookies[key]['domain'] = domain
if secure:
self.cookies[key]['secure'] = True
def delete_cookie(self, key, path='/', domain=None):
self.set_cookie(key, max_age=0, path=path, domain=domain,
expires='Thu, 01-Jan-1970 00:00:00 GMT')
def _get_content(self):
if self.has_header('Content-Encoding'):
return ''.join(self._container)
return smart_str(''.join(self._container), self._charset)
def _set_content(self, value):
self._container = [value]
self._is_string = True
content = property(_get_content, _set_content)
def __iter__(self):
self._iterator = iter(self._container)
return self
def next(self):
chunk = self._iterator.next()
if isinstance(chunk, unicode):
chunk = chunk.encode(self._charset)
return str(chunk)
def close(self):
if hasattr(self._container, 'close'):
self._container.close()
# The remaining methods partially implement the file-like object interface.
# See http://docs.python.org/lib/bltin-file-objects.html
def write(self, content):
if not self._is_string:
raise Exception("This %s instance is not writable" % self.__class__)
self._container.append(content)
def flush(self):
pass
def tell(self):
if not self._is_string:
raise Exception("This %s instance cannot tell its position" % self.__class__)
return sum([len(chunk) for chunk in self._container])
class HttpResponseRedirect(HttpResponse):
status_code = 302
def __init__(self, redirect_to):
HttpResponse.__init__(self)
self['Location'] = iri_to_uri(redirect_to)
class HttpResponsePermanentRedirect(HttpResponse):
status_code = 301
def __init__(self, redirect_to):
HttpResponse.__init__(self)
self['Location'] = iri_to_uri(redirect_to)
class HttpResponseNotModified(HttpResponse):
status_code = 304
class HttpResponseBadRequest(HttpResponse):
status_code = 400
class HttpResponseNotFound(HttpResponse):
status_code = 404
class HttpResponseForbidden(HttpResponse):
status_code = 403
class HttpResponseNotAllowed(HttpResponse):
status_code = 405
def __init__(self, permitted_methods):
HttpResponse.__init__(self)
self['Allow'] = ', '.join(permitted_methods)
class HttpResponseGone(HttpResponse):
status_code = 410
def __init__(self, *args, **kwargs):
HttpResponse.__init__(self, *args, **kwargs)
class HttpResponseServerError(HttpResponse):
status_code = 500
def __init__(self, *args, **kwargs):
HttpResponse.__init__(self, *args, **kwargs)
# A backwards compatible alias for HttpRequest.get_host.
def get_host(request):
return request.get_host()
# It's neither necessary nor appropriate to use
# django.utils.encoding.smart_unicode for parsing URLs and form inputs. Thus,
# this slightly more restricted function.
def str_to_unicode(s, encoding):
"""
Converts basestring objects to unicode, using the given encoding. Illegally
encoded input characters are replaced with Unicode "unknown" codepoint
(\ufffd).
Returns any non-basestring objects without change.
"""
if isinstance(s, str):
return unicode(s, encoding, 'replace')
else:
return s
| 35.580645
| 107
| 0.627327
|
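A short session against the legacy `QueryDict` above (Python 2-era Django; passing `encoding` explicitly avoids the settings import the constructor otherwise performs):
from django.http import QueryDict

q = QueryDict('a=1&a=2&b=3', encoding='utf-8')
q.getlist('a')     # [u'1', u'2'] - repeated keys accumulate
q['b']             # u'3' - plain indexing returns the last value
q2 = q.copy()      # mutable deep copy
q2['c'] = '4'      # fine
q['c'] = '4'       # AttributeError: This QueryDict instance is immutable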
629a33dad94b74044a1561f0363c41497213d7fb
| 135
|
py
|
Python
|
pyActionRec/__init__.py
|
Tikquuss/anet2016-cuhk
|
a742e1686fbeef8e35b7d792542f3b89aa46fa24
|
[
"BSD-2-Clause"
] | 1
|
2020-10-28T08:24:17.000Z
|
2020-10-28T08:24:17.000Z
|
pyActionRec/__init__.py
|
Tikquuss/anet2016-cuhk
|
a742e1686fbeef8e35b7d792542f3b89aa46fa24
|
[
"BSD-2-Clause"
] | null | null | null |
pyActionRec/__init__.py
|
Tikquuss/anet2016-cuhk
|
a742e1686fbeef8e35b7d792542f3b89aa46fa24
|
[
"BSD-2-Clause"
] | null | null | null |
import os
import sys
sys.path.append(os.path.join(os.environ['ANET_HOME'], "pyActionRec"))
from config import ANET_CFG
import anet_db
| 19.285714
| 69
| 0.785185
|
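The `pyActionRec` package init above reads `ANET_HOME` at import time, so the variable must be set first; a minimal sketch (the path is a placeholder):
import os
os.environ['ANET_HOME'] = '/path/to/anet2016-cuhk'  # repo root containing pyActionRec/
import pyActionRec                                  # the sys.path entry above now resolves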
14bb27a4f4c9c220858212df0cad15aefe388d1c
| 1,769
|
py
|
Python
|
utils/helpers.py
|
charlesxjyang/DeepGeyser
|
59f54c67667800f091d7af1805c04bbc36c7624b
|
[
"Apache-2.0"
] | null | null | null |
utils/helpers.py
|
charlesxjyang/DeepGeyser
|
59f54c67667800f091d7af1805c04bbc36c7624b
|
[
"Apache-2.0"
] | null | null | null |
utils/helpers.py
|
charlesxjyang/DeepGeyser
|
59f54c67667800f091d7af1805c04bbc36c7624b
|
[
"Apache-2.0"
] | null | null | null |
import datetime
import numpy as np  # used by save_np_array below
from keras import models
import pandas as pd
from sklearn.externals import joblib
# loads a .tsv file, pickles it in the data folder, and returns a pandas DataFrame
def load_tsv(filename:str):
#make sure in data folder
if 'tsv' not in filename:
filename = filename + '.tsv'
if 'data' not in filename:
if 'eruption' in filename:
filename = '../../data/eruption_data/' + filename
if 'Logger' in filename:
filename = '../../data/logger_data/' + filename
try:
df = pd.read_table(filename,sep='\t')
except:
#catch parser error
df = pd.read_table(filename,sep='\t',engine='python')
#save pickle to data folder
save_filename = filename[:-3] + 'pkl'
df.to_pickle(save_filename)
return df
def unix_to_datetime(unix_epoch_time):
return datetime.datetime.fromtimestamp(unix_epoch_time)
def datetime_to_unix(dt):
    # seconds since the Unix epoch for a naive-UTC datetime
    return (dt - datetime.datetime(1970, 1, 1)).total_seconds()
def save_keras_model(model,filename):
if 'h5' not in filename:
filename = filename + '.h5'
model.save('../../data/saved_models/' + filename)
def save_sklearn_model(model,filename):
if 'joblib' not in filename:
filename = filename + '.joblib'
if 'data' not in filename:
filename = '../../data/saved_models/' + filename
joblib.dump(model,filename)
def load_model(filename:str):
#assume only filename, no rel. path specified
if 'data' not in filename:
filename = '../../data/saved_models/' + filename
return models.load_model(filename)
def save_np_array(filename:str,arr):
if 'data' not in filename:
filename = '../../data/saved_predictions/' + filename
return np.save(filename,arr)
| 32.759259
| 72
| 0.661391
|
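One subtlety worth flagging in the two time helpers above: `unix_to_datetime` uses `fromtimestamp` (local time), while subtracting the naive 1970 epoch assumes UTC, so a round-trip drifts by the local UTC offset. A quick check:
import datetime

epoch = 1500000000
dt = datetime.datetime.fromtimestamp(epoch)              # local-time datetime
back = (dt - datetime.datetime(1970, 1, 1)).total_seconds()
print(back - epoch)  # the local UTC offset in seconds; 0 only in UTC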
cdfecc0f536539f965bb5c555f0f12d77739f2be
| 132,268
|
py
|
Python
|
numpy/core/tests/test_numeric.py
|
touqir14/numpy
|
85e2ce980fa4883c1add983be924d5e16d3723ec
|
[
"BSD-3-Clause"
] | null | null | null |
numpy/core/tests/test_numeric.py
|
touqir14/numpy
|
85e2ce980fa4883c1add983be924d5e16d3723ec
|
[
"BSD-3-Clause"
] | null | null | null |
numpy/core/tests/test_numeric.py
|
touqir14/numpy
|
85e2ce980fa4883c1add983be924d5e16d3723ec
|
[
"BSD-3-Clause"
] | null | null | null |
import sys
import warnings
import itertools
import platform
import pytest
import math
from decimal import Decimal
import numpy as np
from numpy.core import umath
from numpy.random import rand, randint, randn
from numpy.testing import (
assert_, assert_equal, assert_raises, assert_raises_regex,
assert_array_equal, assert_almost_equal, assert_array_almost_equal,
assert_warns, assert_array_max_ulp, HAS_REFCOUNT
)
from numpy.core._rational_tests import rational
from hypothesis import assume, given, strategies as st
from hypothesis.extra import numpy as hynp
class TestResize:
def test_copies(self):
A = np.array([[1, 2], [3, 4]])
Ar1 = np.array([[1, 2, 3, 4], [1, 2, 3, 4]])
assert_equal(np.resize(A, (2, 4)), Ar1)
Ar2 = np.array([[1, 2], [3, 4], [1, 2], [3, 4]])
assert_equal(np.resize(A, (4, 2)), Ar2)
Ar3 = np.array([[1, 2, 3], [4, 1, 2], [3, 4, 1], [2, 3, 4]])
assert_equal(np.resize(A, (4, 3)), Ar3)
def test_repeats(self):
A = np.array([1, 2, 3])
Ar1 = np.array([[1, 2, 3, 1], [2, 3, 1, 2]])
assert_equal(np.resize(A, (2, 4)), Ar1)
Ar2 = np.array([[1, 2], [3, 1], [2, 3], [1, 2]])
assert_equal(np.resize(A, (4, 2)), Ar2)
Ar3 = np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3], [1, 2, 3]])
assert_equal(np.resize(A, (4, 3)), Ar3)
def test_zeroresize(self):
A = np.array([[1, 2], [3, 4]])
Ar = np.resize(A, (0,))
assert_array_equal(Ar, np.array([]))
assert_equal(A.dtype, Ar.dtype)
Ar = np.resize(A, (0, 2))
assert_equal(Ar.shape, (0, 2))
Ar = np.resize(A, (2, 0))
assert_equal(Ar.shape, (2, 0))
def test_reshape_from_zero(self):
# See also gh-6740
A = np.zeros(0, dtype=[('a', np.float32)])
Ar = np.resize(A, (2, 1))
assert_array_equal(Ar, np.zeros((2, 1), Ar.dtype))
assert_equal(A.dtype, Ar.dtype)
def test_negative_resize(self):
A = np.arange(0, 10, dtype=np.float32)
new_shape = (-10, -1)
with pytest.raises(ValueError, match=r"negative"):
np.resize(A, new_shape=new_shape)
def test_subclass(self):
class MyArray(np.ndarray):
__array_priority__ = 1.
my_arr = np.array([1]).view(MyArray)
assert type(np.resize(my_arr, 5)) is MyArray
assert type(np.resize(my_arr, 0)) is MyArray
my_arr = np.array([]).view(MyArray)
assert type(np.resize(my_arr, 5)) is MyArray
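# A quick sketch (not part of the numpy suite) of the repeat-to-fill rule the
# class above asserts: np.resize tiles the flattened input until the shape fills.
_resize_demo = np.resize(np.array([[1, 2], [3, 4]]), (4, 3))
assert _resize_demo.tolist() == [[1, 2, 3], [4, 1, 2], [3, 4, 1], [2, 3, 4]]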
class TestNonarrayArgs:
# check that non-array arguments to functions wrap them in arrays
def test_choose(self):
choices = [[0, 1, 2],
[3, 4, 5],
[5, 6, 7]]
tgt = [5, 1, 5]
a = [2, 0, 1]
out = np.choose(a, choices)
assert_equal(out, tgt)
def test_clip(self):
arr = [-1, 5, 2, 3, 10, -4, -9]
out = np.clip(arr, 2, 7)
tgt = [2, 5, 2, 3, 7, 2, 2]
assert_equal(out, tgt)
def test_compress(self):
arr = [[0, 1, 2, 3, 4],
[5, 6, 7, 8, 9]]
tgt = [[5, 6, 7, 8, 9]]
out = np.compress([0, 1], arr, axis=0)
assert_equal(out, tgt)
def test_count_nonzero(self):
arr = [[0, 1, 7, 0, 0],
[3, 0, 0, 2, 19]]
tgt = np.array([2, 3])
out = np.count_nonzero(arr, axis=1)
assert_equal(out, tgt)
def test_cumproduct(self):
A = [[1, 2, 3], [4, 5, 6]]
assert_(np.all(np.cumproduct(A) == np.array([1, 2, 6, 24, 120, 720])))
def test_diagonal(self):
a = [[0, 1, 2, 3],
[4, 5, 6, 7],
[8, 9, 10, 11]]
out = np.diagonal(a)
tgt = [0, 5, 10]
assert_equal(out, tgt)
def test_mean(self):
A = [[1, 2, 3], [4, 5, 6]]
assert_(np.mean(A) == 3.5)
assert_(np.all(np.mean(A, 0) == np.array([2.5, 3.5, 4.5])))
assert_(np.all(np.mean(A, 1) == np.array([2., 5.])))
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', RuntimeWarning)
assert_(np.isnan(np.mean([])))
assert_(w[0].category is RuntimeWarning)
def test_ptp(self):
a = [3, 4, 5, 10, -3, -5, 6.0]
assert_equal(np.ptp(a, axis=0), 15.0)
def test_prod(self):
arr = [[1, 2, 3, 4],
[5, 6, 7, 9],
[10, 3, 4, 5]]
tgt = [24, 1890, 600]
assert_equal(np.prod(arr, axis=-1), tgt)
def test_ravel(self):
a = [[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]]
tgt = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
assert_equal(np.ravel(a), tgt)
def test_repeat(self):
a = [1, 2, 3]
tgt = [1, 1, 2, 2, 3, 3]
out = np.repeat(a, 2)
assert_equal(out, tgt)
def test_reshape(self):
arr = [[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]]
tgt = [[1, 2, 3, 4, 5, 6], [7, 8, 9, 10, 11, 12]]
assert_equal(np.reshape(arr, (2, 6)), tgt)
def test_round(self):
arr = [1.56, 72.54, 6.35, 3.25]
tgt = [1.6, 72.5, 6.4, 3.2]
assert_equal(np.around(arr, decimals=1), tgt)
s = np.float64(1.)
assert_(isinstance(s.round(), np.float64))
assert_equal(s.round(), 1.)
@pytest.mark.parametrize('dtype', [
np.int8, np.int16, np.int32, np.int64,
np.uint8, np.uint16, np.uint32, np.uint64,
np.float16, np.float32, np.float64,
])
def test_dunder_round(self, dtype):
s = dtype(1)
assert_(isinstance(round(s), int))
assert_(isinstance(round(s, None), int))
assert_(isinstance(round(s, ndigits=None), int))
assert_equal(round(s), 1)
assert_equal(round(s, None), 1)
assert_equal(round(s, ndigits=None), 1)
@pytest.mark.parametrize('val, ndigits', [
pytest.param(2**31 - 1, -1,
marks=pytest.mark.xfail(reason="Out of range of int32")
),
(2**31 - 1, 1-math.ceil(math.log10(2**31 - 1))),
(2**31 - 1, -math.ceil(math.log10(2**31 - 1)))
])
def test_dunder_round_edgecases(self, val, ndigits):
assert_equal(round(val, ndigits), round(np.int32(val), ndigits))
def test_dunder_round_accuracy(self):
f = np.float64(5.1 * 10**73)
assert_(isinstance(round(f, -73), np.float64))
assert_array_max_ulp(round(f, -73), 5.0 * 10**73)
assert_(isinstance(round(f, ndigits=-73), np.float64))
assert_array_max_ulp(round(f, ndigits=-73), 5.0 * 10**73)
i = np.int64(501)
assert_(isinstance(round(i, -2), np.int64))
assert_array_max_ulp(round(i, -2), 500)
assert_(isinstance(round(i, ndigits=-2), np.int64))
assert_array_max_ulp(round(i, ndigits=-2), 500)
@pytest.mark.xfail(raises=AssertionError, reason="gh-15896")
def test_round_py_consistency(self):
f = 5.1 * 10**73
assert_equal(round(np.float64(f), -73), round(f, -73))
def test_searchsorted(self):
arr = [-8, -5, -1, 3, 6, 10]
out = np.searchsorted(arr, 0)
assert_equal(out, 3)
def test_size(self):
A = [[1, 2, 3], [4, 5, 6]]
assert_(np.size(A) == 6)
assert_(np.size(A, 0) == 2)
assert_(np.size(A, 1) == 3)
def test_squeeze(self):
A = [[[1, 1, 1], [2, 2, 2], [3, 3, 3]]]
assert_equal(np.squeeze(A).shape, (3, 3))
assert_equal(np.squeeze(np.zeros((1, 3, 1))).shape, (3,))
assert_equal(np.squeeze(np.zeros((1, 3, 1)), axis=0).shape, (3, 1))
assert_equal(np.squeeze(np.zeros((1, 3, 1)), axis=-1).shape, (1, 3))
assert_equal(np.squeeze(np.zeros((1, 3, 1)), axis=2).shape, (1, 3))
assert_equal(np.squeeze([np.zeros((3, 1))]).shape, (3,))
assert_equal(np.squeeze([np.zeros((3, 1))], axis=0).shape, (3, 1))
assert_equal(np.squeeze([np.zeros((3, 1))], axis=2).shape, (1, 3))
assert_equal(np.squeeze([np.zeros((3, 1))], axis=-1).shape, (1, 3))
def test_std(self):
A = [[1, 2, 3], [4, 5, 6]]
assert_almost_equal(np.std(A), 1.707825127659933)
assert_almost_equal(np.std(A, 0), np.array([1.5, 1.5, 1.5]))
assert_almost_equal(np.std(A, 1), np.array([0.81649658, 0.81649658]))
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', RuntimeWarning)
assert_(np.isnan(np.std([])))
assert_(w[0].category is RuntimeWarning)
def test_swapaxes(self):
tgt = [[[0, 4], [2, 6]], [[1, 5], [3, 7]]]
a = [[[0, 1], [2, 3]], [[4, 5], [6, 7]]]
out = np.swapaxes(a, 0, 2)
assert_equal(out, tgt)
def test_sum(self):
m = [[1, 2, 3],
[4, 5, 6],
[7, 8, 9]]
tgt = [[6], [15], [24]]
out = np.sum(m, axis=1, keepdims=True)
assert_equal(tgt, out)
def test_take(self):
tgt = [2, 3, 5]
indices = [1, 2, 4]
a = [1, 2, 3, 4, 5]
out = np.take(a, indices)
assert_equal(out, tgt)
def test_trace(self):
c = [[1, 2], [3, 4], [5, 6]]
assert_equal(np.trace(c), 5)
def test_transpose(self):
arr = [[1, 2], [3, 4], [5, 6]]
tgt = [[1, 3, 5], [2, 4, 6]]
assert_equal(np.transpose(arr, (1, 0)), tgt)
def test_var(self):
A = [[1, 2, 3], [4, 5, 6]]
assert_almost_equal(np.var(A), 2.9166666666666665)
assert_almost_equal(np.var(A, 0), np.array([2.25, 2.25, 2.25]))
assert_almost_equal(np.var(A, 1), np.array([0.66666667, 0.66666667]))
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', RuntimeWarning)
assert_(np.isnan(np.var([])))
assert_(w[0].category is RuntimeWarning)
B = np.array([None, 0])
B[0] = 1j
assert_almost_equal(np.var(B), 0.25)
class TestIsscalar:
def test_isscalar(self):
assert_(np.isscalar(3.1))
assert_(np.isscalar(np.int16(12345)))
assert_(np.isscalar(False))
assert_(np.isscalar('numpy'))
assert_(not np.isscalar([3.1]))
assert_(not np.isscalar(None))
# PEP 3141
from fractions import Fraction
assert_(np.isscalar(Fraction(5, 17)))
from numbers import Number
assert_(np.isscalar(Number()))
class TestBoolScalar:
def test_logical(self):
f = np.False_
t = np.True_
s = "xyz"
assert_((t and s) is s)
assert_((f and s) is f)
def test_bitwise_or(self):
f = np.False_
t = np.True_
assert_((t | t) is t)
assert_((f | t) is t)
assert_((t | f) is t)
assert_((f | f) is f)
def test_bitwise_and(self):
f = np.False_
t = np.True_
assert_((t & t) is t)
assert_((f & t) is f)
assert_((t & f) is f)
assert_((f & f) is f)
def test_bitwise_xor(self):
f = np.False_
t = np.True_
assert_((t ^ t) is f)
assert_((f ^ t) is t)
assert_((t ^ f) is t)
assert_((f ^ f) is f)
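# Illustrative sketch, not part of the test suite: the identity (`is`)
# assertions above rely on NumPy exposing exactly one np.True_ and one
# np.False_ object, so boolean scalar results can be compared by identity.
# The helper name below is hypothetical.
def _demo_bool_singletons():
    t, f = np.bool_(1), np.bool_(0)
    # every np.bool_ with the same value is the same cached object
    return (t is np.True_) and (f is np.False_) and ((t & f) is np.False_)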
class TestBoolArray:
def setup(self):
# offset for simd tests
self.t = np.array([True] * 41, dtype=bool)[1::]
self.f = np.array([False] * 41, dtype=bool)[1::]
self.o = np.array([False] * 42, dtype=bool)[2::]
self.nm = self.f.copy()
self.im = self.t.copy()
self.nm[3] = True
self.nm[-2] = True
self.im[3] = False
self.im[-2] = False
def test_all_any(self):
assert_(self.t.all())
assert_(self.t.any())
assert_(not self.f.all())
assert_(not self.f.any())
assert_(self.nm.any())
assert_(self.im.any())
assert_(not self.nm.all())
assert_(not self.im.all())
# check bad element in all positions
for i in range(256 - 7):
d = np.array([False] * 256, dtype=bool)[7::]
d[i] = True
assert_(np.any(d))
e = np.array([True] * 256, dtype=bool)[7::]
e[i] = False
assert_(not np.all(e))
assert_array_equal(e, ~d)
# big array test for blocked libc loops
for i in list(range(9, 6000, 507)) + [7764, 90021, -10]:
d = np.array([False] * 100043, dtype=bool)
d[i] = True
assert_(np.any(d), msg="%r" % i)
e = np.array([True] * 100043, dtype=bool)
e[i] = False
assert_(not np.all(e), msg="%r" % i)
def test_logical_not_abs(self):
assert_array_equal(~self.t, self.f)
assert_array_equal(np.abs(~self.t), self.f)
assert_array_equal(np.abs(~self.f), self.t)
assert_array_equal(np.abs(self.f), self.f)
assert_array_equal(~np.abs(self.f), self.t)
assert_array_equal(~np.abs(self.t), self.f)
assert_array_equal(np.abs(~self.nm), self.im)
np.logical_not(self.t, out=self.o)
assert_array_equal(self.o, self.f)
np.abs(self.t, out=self.o)
assert_array_equal(self.o, self.t)
def test_logical_and_or_xor(self):
assert_array_equal(self.t | self.t, self.t)
assert_array_equal(self.f | self.f, self.f)
assert_array_equal(self.t | self.f, self.t)
assert_array_equal(self.f | self.t, self.t)
np.logical_or(self.t, self.t, out=self.o)
assert_array_equal(self.o, self.t)
assert_array_equal(self.t & self.t, self.t)
assert_array_equal(self.f & self.f, self.f)
assert_array_equal(self.t & self.f, self.f)
assert_array_equal(self.f & self.t, self.f)
np.logical_and(self.t, self.t, out=self.o)
assert_array_equal(self.o, self.t)
assert_array_equal(self.t ^ self.t, self.f)
assert_array_equal(self.f ^ self.f, self.f)
assert_array_equal(self.t ^ self.f, self.t)
assert_array_equal(self.f ^ self.t, self.t)
np.logical_xor(self.t, self.t, out=self.o)
assert_array_equal(self.o, self.f)
assert_array_equal(self.nm & self.t, self.nm)
assert_array_equal(self.im & self.f, False)
assert_array_equal(self.nm & True, self.nm)
assert_array_equal(self.im & False, self.f)
assert_array_equal(self.nm | self.t, self.t)
assert_array_equal(self.im | self.f, self.im)
assert_array_equal(self.nm | True, self.t)
assert_array_equal(self.im | False, self.im)
assert_array_equal(self.nm ^ self.t, self.im)
assert_array_equal(self.im ^ self.f, self.im)
assert_array_equal(self.nm ^ True, self.im)
assert_array_equal(self.im ^ False, self.im)
class TestBoolCmp:
def setup(self):
self.f = np.ones(256, dtype=np.float32)
self.ef = np.ones(self.f.size, dtype=bool)
self.d = np.ones(128, dtype=np.float64)
self.ed = np.ones(self.d.size, dtype=bool)
        # generate values for all permutations of 256-bit simd vectors
s = 0
for i in range(32):
self.f[s:s+8] = [i & 2**x for x in range(8)]
self.ef[s:s+8] = [(i & 2**x) != 0 for x in range(8)]
s += 8
s = 0
for i in range(16):
self.d[s:s+4] = [i & 2**x for x in range(4)]
self.ed[s:s+4] = [(i & 2**x) != 0 for x in range(4)]
s += 4
self.nf = self.f.copy()
self.nd = self.d.copy()
self.nf[self.ef] = np.nan
self.nd[self.ed] = np.nan
self.inff = self.f.copy()
self.infd = self.d.copy()
self.inff[::3][self.ef[::3]] = np.inf
self.infd[::3][self.ed[::3]] = np.inf
self.inff[1::3][self.ef[1::3]] = -np.inf
self.infd[1::3][self.ed[1::3]] = -np.inf
self.inff[2::3][self.ef[2::3]] = np.nan
self.infd[2::3][self.ed[2::3]] = np.nan
self.efnonan = self.ef.copy()
self.efnonan[2::3] = False
self.ednonan = self.ed.copy()
self.ednonan[2::3] = False
self.signf = self.f.copy()
self.signd = self.d.copy()
self.signf[self.ef] *= -1.
self.signd[self.ed] *= -1.
self.signf[1::6][self.ef[1::6]] = -np.inf
self.signd[1::6][self.ed[1::6]] = -np.inf
self.signf[3::6][self.ef[3::6]] = -np.nan
self.signd[3::6][self.ed[3::6]] = -np.nan
self.signf[4::6][self.ef[4::6]] = -0.
self.signd[4::6][self.ed[4::6]] = -0.
def test_float(self):
# offset for alignment test
for i in range(4):
assert_array_equal(self.f[i:] > 0, self.ef[i:])
assert_array_equal(self.f[i:] - 1 >= 0, self.ef[i:])
assert_array_equal(self.f[i:] == 0, ~self.ef[i:])
assert_array_equal(-self.f[i:] < 0, self.ef[i:])
assert_array_equal(-self.f[i:] + 1 <= 0, self.ef[i:])
r = self.f[i:] != 0
assert_array_equal(r, self.ef[i:])
r2 = self.f[i:] != np.zeros_like(self.f[i:])
r3 = 0 != self.f[i:]
assert_array_equal(r, r2)
assert_array_equal(r, r3)
# check bool == 0x1
assert_array_equal(r.view(np.int8), r.astype(np.int8))
assert_array_equal(r2.view(np.int8), r2.astype(np.int8))
assert_array_equal(r3.view(np.int8), r3.astype(np.int8))
# isnan on amd64 takes the same code path
assert_array_equal(np.isnan(self.nf[i:]), self.ef[i:])
assert_array_equal(np.isfinite(self.nf[i:]), ~self.ef[i:])
assert_array_equal(np.isfinite(self.inff[i:]), ~self.ef[i:])
assert_array_equal(np.isinf(self.inff[i:]), self.efnonan[i:])
assert_array_equal(np.signbit(self.signf[i:]), self.ef[i:])
def test_double(self):
# offset for alignment test
for i in range(2):
assert_array_equal(self.d[i:] > 0, self.ed[i:])
assert_array_equal(self.d[i:] - 1 >= 0, self.ed[i:])
assert_array_equal(self.d[i:] == 0, ~self.ed[i:])
assert_array_equal(-self.d[i:] < 0, self.ed[i:])
assert_array_equal(-self.d[i:] + 1 <= 0, self.ed[i:])
r = self.d[i:] != 0
assert_array_equal(r, self.ed[i:])
r2 = self.d[i:] != np.zeros_like(self.d[i:])
r3 = 0 != self.d[i:]
assert_array_equal(r, r2)
assert_array_equal(r, r3)
# check bool == 0x1
assert_array_equal(r.view(np.int8), r.astype(np.int8))
assert_array_equal(r2.view(np.int8), r2.astype(np.int8))
assert_array_equal(r3.view(np.int8), r3.astype(np.int8))
# isnan on amd64 takes the same code path
assert_array_equal(np.isnan(self.nd[i:]), self.ed[i:])
assert_array_equal(np.isfinite(self.nd[i:]), ~self.ed[i:])
assert_array_equal(np.isfinite(self.infd[i:]), ~self.ed[i:])
assert_array_equal(np.isinf(self.infd[i:]), self.ednonan[i:])
assert_array_equal(np.signbit(self.signd[i:]), self.ed[i:])
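# Illustrative sketch, not part of the test suite: np.signbit reads the IEEE
# sign bit directly, so it distinguishes -0.0 from 0.0 and is True for -inf
# and negative NaNs, exactly the values the setup above plants in the sign
# arrays. The helper name is hypothetical.
def _demo_signbit():
    x = np.array([-0.0, 0.0, -np.inf, 1.0])
    return np.signbit(x)   # -> array([ True, False,  True, False])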
class TestSeterr:
def test_default(self):
err = np.geterr()
assert_equal(err,
dict(divide='warn',
invalid='warn',
over='warn',
under='ignore')
)
def test_set(self):
with np.errstate():
err = np.seterr()
old = np.seterr(divide='print')
assert_(err == old)
new = np.seterr()
assert_(new['divide'] == 'print')
np.seterr(over='raise')
assert_(np.geterr()['over'] == 'raise')
assert_(new['divide'] == 'print')
np.seterr(**old)
assert_(np.geterr() == old)
@pytest.mark.skipif(platform.machine() == "armv5tel", reason="See gh-413.")
def test_divide_err(self):
with np.errstate(divide='raise'):
with assert_raises(FloatingPointError):
np.array([1.]) / np.array([0.])
np.seterr(divide='ignore')
np.array([1.]) / np.array([0.])
def test_errobj(self):
olderrobj = np.geterrobj()
self.called = 0
try:
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
with np.errstate(divide='warn'):
np.seterrobj([20000, 1, None])
np.array([1.]) / np.array([0.])
assert_equal(len(w), 1)
def log_err(*args):
self.called += 1
extobj_err = args
assert_(len(extobj_err) == 2)
assert_("divide" in extobj_err[0])
with np.errstate(divide='ignore'):
np.seterrobj([20000, 3, log_err])
np.array([1.]) / np.array([0.])
assert_equal(self.called, 1)
np.seterrobj(olderrobj)
with np.errstate(divide='ignore'):
np.divide(1., 0., extobj=[20000, 3, log_err])
assert_equal(self.called, 2)
finally:
np.seterrobj(olderrobj)
del self.called
def test_errobj_noerrmask(self):
# errmask = 0 has a special code path for the default
olderrobj = np.geterrobj()
try:
# set errobj to something non default
np.seterrobj([umath.UFUNC_BUFSIZE_DEFAULT,
umath.ERR_DEFAULT + 1, None])
# call a ufunc
np.isnan(np.array([6]))
# same with the default, lots of times to get rid of possible
# pre-existing stack in the code
for i in range(10000):
np.seterrobj([umath.UFUNC_BUFSIZE_DEFAULT, umath.ERR_DEFAULT,
None])
np.isnan(np.array([6]))
finally:
np.seterrobj(olderrobj)
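# Illustrative sketch, not part of the test suite: np.errstate is a context
# manager that saves and restores the floating-point error state, which is
# why the tests above can flip 'raise'/'print'/'ignore' without leaking
# settings into other tests. The helper name is hypothetical.
def _demo_errstate_scoping():
    before = np.geterr()
    with np.errstate(divide='ignore'):
        np.array([1.0]) / np.array([0.0])   # silently yields inf
    return np.geterr() == before            # True: state restored on exit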
class TestFloatExceptions:
def assert_raises_fpe(self, fpeerr, flop, x, y):
ftype = type(x)
try:
flop(x, y)
assert_(False,
"Type %s did not raise fpe error '%s'." % (ftype, fpeerr))
except FloatingPointError as exc:
assert_(str(exc).find(fpeerr) >= 0,
"Type %s raised wrong fpe error '%s'." % (ftype, exc))
def assert_op_raises_fpe(self, fpeerr, flop, sc1, sc2):
# Check that fpe exception is raised.
#
# Given a floating operation `flop` and two scalar values, check that
# the operation raises the floating point exception specified by
# `fpeerr`. Tests all variants with 0-d array scalars as well.
self.assert_raises_fpe(fpeerr, flop, sc1, sc2)
self.assert_raises_fpe(fpeerr, flop, sc1[()], sc2)
self.assert_raises_fpe(fpeerr, flop, sc1, sc2[()])
self.assert_raises_fpe(fpeerr, flop, sc1[()], sc2[()])
def test_floating_exceptions(self):
# Test basic arithmetic function errors
with np.errstate(all='raise'):
# Test for all real and complex float types
for typecode in np.typecodes['AllFloat']:
ftype = np.obj2sctype(typecode)
if np.dtype(ftype).kind == 'f':
# Get some extreme values for the type
fi = np.finfo(ftype)
ft_tiny = fi.tiny
ft_max = fi.max
ft_eps = fi.eps
underflow = 'underflow'
divbyzero = 'divide by zero'
else:
# 'c', complex, corresponding real dtype
rtype = type(ftype(0).real)
fi = np.finfo(rtype)
ft_tiny = ftype(fi.tiny)
ft_max = ftype(fi.max)
ft_eps = ftype(fi.eps)
# The complex types raise different exceptions
underflow = ''
divbyzero = ''
overflow = 'overflow'
invalid = 'invalid'
self.assert_raises_fpe(underflow,
lambda a, b: a/b, ft_tiny, ft_max)
self.assert_raises_fpe(underflow,
lambda a, b: a*b, ft_tiny, ft_tiny)
self.assert_raises_fpe(overflow,
lambda a, b: a*b, ft_max, ftype(2))
self.assert_raises_fpe(overflow,
lambda a, b: a/b, ft_max, ftype(0.5))
self.assert_raises_fpe(overflow,
lambda a, b: a+b, ft_max, ft_max*ft_eps)
self.assert_raises_fpe(overflow,
lambda a, b: a-b, -ft_max, ft_max*ft_eps)
self.assert_raises_fpe(overflow,
np.power, ftype(2), ftype(2**fi.nexp))
self.assert_raises_fpe(divbyzero,
lambda a, b: a/b, ftype(1), ftype(0))
self.assert_raises_fpe(invalid,
lambda a, b: a/b, ftype(np.inf), ftype(np.inf))
self.assert_raises_fpe(invalid,
lambda a, b: a/b, ftype(0), ftype(0))
self.assert_raises_fpe(invalid,
lambda a, b: a-b, ftype(np.inf), ftype(np.inf))
self.assert_raises_fpe(invalid,
lambda a, b: a+b, ftype(np.inf), ftype(-np.inf))
self.assert_raises_fpe(invalid,
lambda a, b: a*b, ftype(0), ftype(np.inf))
def test_warnings(self):
# test warning code path
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
with np.errstate(all="warn"):
np.divide(1, 0.)
assert_equal(len(w), 1)
assert_("divide by zero" in str(w[0].message))
np.array(1e300) * np.array(1e300)
assert_equal(len(w), 2)
assert_("overflow" in str(w[-1].message))
np.array(np.inf) - np.array(np.inf)
assert_equal(len(w), 3)
assert_("invalid value" in str(w[-1].message))
np.array(1e-300) * np.array(1e-300)
assert_equal(len(w), 4)
assert_("underflow" in str(w[-1].message))
class TestTypes:
def check_promotion_cases(self, promote_func):
# tests that the scalars get coerced correctly.
b = np.bool_(0)
i8, i16, i32, i64 = np.int8(0), np.int16(0), np.int32(0), np.int64(0)
u8, u16, u32, u64 = np.uint8(0), np.uint16(0), np.uint32(0), np.uint64(0)
f32, f64, fld = np.float32(0), np.float64(0), np.longdouble(0)
c64, c128, cld = np.complex64(0), np.complex128(0), np.clongdouble(0)
# coercion within the same kind
assert_equal(promote_func(i8, i16), np.dtype(np.int16))
assert_equal(promote_func(i32, i8), np.dtype(np.int32))
assert_equal(promote_func(i16, i64), np.dtype(np.int64))
assert_equal(promote_func(u8, u32), np.dtype(np.uint32))
assert_equal(promote_func(f32, f64), np.dtype(np.float64))
assert_equal(promote_func(fld, f32), np.dtype(np.longdouble))
assert_equal(promote_func(f64, fld), np.dtype(np.longdouble))
assert_equal(promote_func(c128, c64), np.dtype(np.complex128))
assert_equal(promote_func(cld, c128), np.dtype(np.clongdouble))
assert_equal(promote_func(c64, fld), np.dtype(np.clongdouble))
# coercion between kinds
assert_equal(promote_func(b, i32), np.dtype(np.int32))
assert_equal(promote_func(b, u8), np.dtype(np.uint8))
assert_equal(promote_func(i8, u8), np.dtype(np.int16))
assert_equal(promote_func(u8, i32), np.dtype(np.int32))
assert_equal(promote_func(i64, u32), np.dtype(np.int64))
assert_equal(promote_func(u64, i32), np.dtype(np.float64))
assert_equal(promote_func(i32, f32), np.dtype(np.float64))
assert_equal(promote_func(i64, f32), np.dtype(np.float64))
assert_equal(promote_func(f32, i16), np.dtype(np.float32))
assert_equal(promote_func(f32, u32), np.dtype(np.float64))
assert_equal(promote_func(f32, c64), np.dtype(np.complex64))
assert_equal(promote_func(c128, f32), np.dtype(np.complex128))
assert_equal(promote_func(cld, f64), np.dtype(np.clongdouble))
# coercion between scalars and 1-D arrays
assert_equal(promote_func(np.array([b]), i8), np.dtype(np.int8))
assert_equal(promote_func(np.array([b]), u8), np.dtype(np.uint8))
assert_equal(promote_func(np.array([b]), i32), np.dtype(np.int32))
assert_equal(promote_func(np.array([b]), u32), np.dtype(np.uint32))
assert_equal(promote_func(np.array([i8]), i64), np.dtype(np.int8))
assert_equal(promote_func(u64, np.array([i32])), np.dtype(np.int32))
assert_equal(promote_func(i64, np.array([u32])), np.dtype(np.uint32))
assert_equal(promote_func(np.int32(-1), np.array([u64])),
np.dtype(np.float64))
assert_equal(promote_func(f64, np.array([f32])), np.dtype(np.float32))
assert_equal(promote_func(fld, np.array([f32])), np.dtype(np.float32))
assert_equal(promote_func(np.array([f64]), fld), np.dtype(np.float64))
assert_equal(promote_func(fld, np.array([c64])),
np.dtype(np.complex64))
assert_equal(promote_func(c64, np.array([f64])),
np.dtype(np.complex128))
assert_equal(promote_func(np.complex64(3j), np.array([f64])),
np.dtype(np.complex128))
# coercion between scalars and 1-D arrays, where
# the scalar has greater kind than the array
assert_equal(promote_func(np.array([b]), f64), np.dtype(np.float64))
assert_equal(promote_func(np.array([b]), i64), np.dtype(np.int64))
assert_equal(promote_func(np.array([b]), u64), np.dtype(np.uint64))
assert_equal(promote_func(np.array([i8]), f64), np.dtype(np.float64))
assert_equal(promote_func(np.array([u16]), f64), np.dtype(np.float64))
# uint and int are treated as the same "kind" for
# the purposes of array-scalar promotion.
assert_equal(promote_func(np.array([u16]), i32), np.dtype(np.uint16))
# float and complex are treated as the same "kind" for
# the purposes of array-scalar promotion, so that you can do
# (0j + float32array) to get a complex64 array instead of
# a complex128 array.
assert_equal(promote_func(np.array([f32]), c128),
np.dtype(np.complex64))
def test_coercion(self):
def res_type(a, b):
return np.add(a, b).dtype
self.check_promotion_cases(res_type)
# Use-case: float/complex scalar * bool/int8 array
# shouldn't narrow the float/complex type
for a in [np.array([True, False]), np.array([-3, 12], dtype=np.int8)]:
b = 1.234 * a
assert_equal(b.dtype, np.dtype('f8'), "array type %s" % a.dtype)
b = np.longdouble(1.234) * a
assert_equal(b.dtype, np.dtype(np.longdouble),
"array type %s" % a.dtype)
b = np.float64(1.234) * a
assert_equal(b.dtype, np.dtype('f8'), "array type %s" % a.dtype)
b = np.float32(1.234) * a
assert_equal(b.dtype, np.dtype('f4'), "array type %s" % a.dtype)
b = np.float16(1.234) * a
assert_equal(b.dtype, np.dtype('f2'), "array type %s" % a.dtype)
b = 1.234j * a
assert_equal(b.dtype, np.dtype('c16'), "array type %s" % a.dtype)
b = np.clongdouble(1.234j) * a
assert_equal(b.dtype, np.dtype(np.clongdouble),
"array type %s" % a.dtype)
b = np.complex128(1.234j) * a
assert_equal(b.dtype, np.dtype('c16'), "array type %s" % a.dtype)
b = np.complex64(1.234j) * a
assert_equal(b.dtype, np.dtype('c8'), "array type %s" % a.dtype)
# The following use-case is problematic, and to resolve its
# tricky side-effects requires more changes.
#
# Use-case: (1-t)*a, where 't' is a boolean array and 'a' is
# a float32, shouldn't promote to float64
#
# a = np.array([1.0, 1.5], dtype=np.float32)
# t = np.array([True, False])
# b = t*a
# assert_equal(b, [1.0, 0.0])
# assert_equal(b.dtype, np.dtype('f4'))
# b = (1-t)*a
# assert_equal(b, [0.0, 1.5])
# assert_equal(b.dtype, np.dtype('f4'))
#
# Probably ~t (bitwise negation) is more proper to use here,
# but this is arguably less intuitive to understand at a glance, and
# would fail if 't' is actually an integer array instead of boolean:
#
# b = (~t)*a
# assert_equal(b, [0.0, 1.5])
# assert_equal(b.dtype, np.dtype('f4'))
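    # Illustrative sketch, not part of the test suite: the `~t` alternative
    # mentioned above does keep float32, because a bool array promotes with
    # float32 to float32 rather than forcing float64. Hypothetical helper.
    def _demo_bool_mask_product(self):
        a = np.array([1.0, 1.5], dtype=np.float32)
        t = np.array([True, False])
        b = (~t) * a          # -> array([0. , 1.5], dtype=float32)
        return b.dtype        # np.dtype('float32')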
def test_result_type(self):
self.check_promotion_cases(np.result_type)
assert_(np.result_type(None) == np.dtype(None))
def test_promote_types_endian(self):
# promote_types should always return native-endian types
assert_equal(np.promote_types('<i8', '<i8'), np.dtype('i8'))
assert_equal(np.promote_types('>i8', '>i8'), np.dtype('i8'))
with pytest.warns(FutureWarning,
match="Promotion of numbers and bools to strings"):
assert_equal(np.promote_types('>i8', '>U16'), np.dtype('U21'))
assert_equal(np.promote_types('<i8', '<U16'), np.dtype('U21'))
assert_equal(np.promote_types('>U16', '>i8'), np.dtype('U21'))
assert_equal(np.promote_types('<U16', '<i8'), np.dtype('U21'))
assert_equal(np.promote_types('<S5', '<U8'), np.dtype('U8'))
assert_equal(np.promote_types('>S5', '>U8'), np.dtype('U8'))
assert_equal(np.promote_types('<U8', '<S5'), np.dtype('U8'))
assert_equal(np.promote_types('>U8', '>S5'), np.dtype('U8'))
assert_equal(np.promote_types('<U5', '<U8'), np.dtype('U8'))
assert_equal(np.promote_types('>U8', '>U5'), np.dtype('U8'))
assert_equal(np.promote_types('<M8', '<M8'), np.dtype('M8'))
assert_equal(np.promote_types('>M8', '>M8'), np.dtype('M8'))
assert_equal(np.promote_types('<m8', '<m8'), np.dtype('m8'))
assert_equal(np.promote_types('>m8', '>m8'), np.dtype('m8'))
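    # Illustrative sketch, not part of the test suite: whatever the byte
    # order of the inputs, promote_types reports a native-endian result, as
    # the assertions above check pairwise. Hypothetical helper.
    def _demo_promote_native_endian(self):
        res = np.promote_types('>i4', '<i8')
        return res.isnative and res == np.dtype(np.int64)   # True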
def test_can_cast_and_promote_usertypes(self):
# The rational type defines safe casting for signed integers,
# boolean. Rational itself *does* cast safely to double.
# (rational does not actually cast to all signed integers, e.g.
# int64 can be both long and longlong and it registers only the first)
valid_types = ["int8", "int16", "int32", "int64", "bool"]
invalid_types = "BHILQP" + "FDG" + "mM" + "f" + "V"
rational_dt = np.dtype(rational)
for numpy_dtype in valid_types:
numpy_dtype = np.dtype(numpy_dtype)
assert np.can_cast(numpy_dtype, rational_dt)
assert np.promote_types(numpy_dtype, rational_dt) is rational_dt
for numpy_dtype in invalid_types:
numpy_dtype = np.dtype(numpy_dtype)
assert not np.can_cast(numpy_dtype, rational_dt)
with pytest.raises(TypeError):
np.promote_types(numpy_dtype, rational_dt)
double_dt = np.dtype("double")
assert np.can_cast(rational_dt, double_dt)
assert np.promote_types(double_dt, rational_dt) is double_dt
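    # Illustrative sketch, not part of the test suite: the user-defined
    # `rational` dtype registers its casts at import time, which is what the
    # loops above exercise; e.g. int16 -> rational is a safe cast while
    # float32 -> rational is not. Hypothetical helper.
    def _demo_rational_casting(self):
        rational_dt = np.dtype(rational)
        return (np.can_cast(np.int16, rational_dt),     # True
                np.can_cast(np.float32, rational_dt))   # False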
@pytest.mark.parametrize("swap", ["", "swap"])
@pytest.mark.parametrize("string_dtype", ["U", "S"])
def test_promote_types_strings(self, swap, string_dtype):
if swap == "swap":
promote_types = lambda a, b: np.promote_types(b, a)
else:
promote_types = np.promote_types
S = string_dtype
with pytest.warns(FutureWarning,
match="Promotion of numbers and bools to strings") as record:
# Promote numeric with unsized string:
assert_equal(promote_types('bool', S), np.dtype(S+'5'))
assert_equal(promote_types('b', S), np.dtype(S+'4'))
assert_equal(promote_types('u1', S), np.dtype(S+'3'))
assert_equal(promote_types('u2', S), np.dtype(S+'5'))
assert_equal(promote_types('u4', S), np.dtype(S+'10'))
assert_equal(promote_types('u8', S), np.dtype(S+'20'))
assert_equal(promote_types('i1', S), np.dtype(S+'4'))
assert_equal(promote_types('i2', S), np.dtype(S+'6'))
assert_equal(promote_types('i4', S), np.dtype(S+'11'))
assert_equal(promote_types('i8', S), np.dtype(S+'21'))
# Promote numeric with sized string:
assert_equal(promote_types('bool', S+'1'), np.dtype(S+'5'))
assert_equal(promote_types('bool', S+'30'), np.dtype(S+'30'))
assert_equal(promote_types('b', S+'1'), np.dtype(S+'4'))
assert_equal(promote_types('b', S+'30'), np.dtype(S+'30'))
assert_equal(promote_types('u1', S+'1'), np.dtype(S+'3'))
assert_equal(promote_types('u1', S+'30'), np.dtype(S+'30'))
assert_equal(promote_types('u2', S+'1'), np.dtype(S+'5'))
assert_equal(promote_types('u2', S+'30'), np.dtype(S+'30'))
assert_equal(promote_types('u4', S+'1'), np.dtype(S+'10'))
assert_equal(promote_types('u4', S+'30'), np.dtype(S+'30'))
assert_equal(promote_types('u8', S+'1'), np.dtype(S+'20'))
assert_equal(promote_types('u8', S+'30'), np.dtype(S+'30'))
# Promote with object:
assert_equal(promote_types('O', S+'30'), np.dtype('O'))
assert len(record) == 22 # each string promotion gave one warning
@pytest.mark.parametrize(["dtype1", "dtype2"],
[[np.dtype("V6"), np.dtype("V10")],
[np.dtype([("name1", "i8")]), np.dtype([("name2", "i8")])],
[np.dtype("i8,i8"), np.dtype("i4,i4")],
])
def test_invalid_void_promotion(self, dtype1, dtype2):
# Mainly test structured void promotion, which currently allows
# byte-swapping, but nothing else:
with pytest.raises(TypeError):
np.promote_types(dtype1, dtype2)
@pytest.mark.parametrize(["dtype1", "dtype2"],
[[np.dtype("V10"), np.dtype("V10")],
[np.dtype([("name1", "<i8")]), np.dtype([("name1", ">i8")])],
[np.dtype("i8,i8"), np.dtype("i8,>i8")],
])
def test_valid_void_promotion(self, dtype1, dtype2):
assert np.promote_types(dtype1, dtype2) is dtype1
@pytest.mark.parametrize("dtype",
list(np.typecodes["All"]) +
["i,i", "S3", "S100", "U3", "U100", rational])
def test_promote_identical_types_metadata(self, dtype):
# The same type passed in twice to promote types always
# preserves metadata
metadata = {1: 1}
dtype = np.dtype(dtype, metadata=metadata)
res = np.promote_types(dtype, dtype)
assert res.metadata == dtype.metadata
# byte-swapping preserves and makes the dtype native:
dtype = dtype.newbyteorder()
if dtype.isnative:
# The type does not have byte swapping
return
res = np.promote_types(dtype, dtype)
if res.char in "?bhilqpBHILQPefdgFDGOmM" or dtype.type is rational:
# Metadata is lost for simple promotions (they create a new dtype)
assert res.metadata is None
else:
assert res.metadata == metadata
if dtype.kind != "V":
# the result is native (except for structured void)
assert res.isnative
@pytest.mark.slow
@pytest.mark.filterwarnings('ignore:Promotion of numbers:FutureWarning')
@pytest.mark.parametrize(["dtype1", "dtype2"],
itertools.product(
list(np.typecodes["All"]) +
["i,i", "S3", "S100", "U3", "U100", rational],
repeat=2))
def test_promote_types_metadata(self, dtype1, dtype2):
"""Metadata handling in promotion does not appear formalized
right now in NumPy. This test should thus be considered to
document behaviour, rather than test the correct definition of it.
        This test is very ugly; it was useful for rewriting part of the
        promotion, but it should probably be replaced/deleted eventually
        (i.e. when metadata handling in promotion is better defined).
"""
metadata1 = {1: 1}
metadata2 = {2: 2}
dtype1 = np.dtype(dtype1, metadata=metadata1)
dtype2 = np.dtype(dtype2, metadata=metadata2)
try:
res = np.promote_types(dtype1, dtype2)
except TypeError:
# Promotion failed, this test only checks metadata
return
if res.char in "?bhilqpBHILQPefdgFDGOmM" or res.type is rational:
# All simple types lose metadata (due to using promotion table):
assert res.metadata is None
elif res == dtype1:
# If one result is the result, it is usually returned unchanged:
assert res is dtype1
elif res == dtype2:
# dtype1 may have been cast to the same type/kind as dtype2.
# If the resulting dtype is identical we currently pick the cast
# version of dtype1, which lost the metadata:
if np.promote_types(dtype1, dtype2.kind) == dtype2:
                assert res.metadata is None
            else:
                assert res.metadata == metadata2
else:
assert res.metadata is None
# Try again for byteswapped version
dtype1 = dtype1.newbyteorder()
assert dtype1.metadata == metadata1
res_bs = np.promote_types(dtype1, dtype2)
if res_bs.names is not None:
# Structured promotion doesn't remove byteswap:
assert res_bs.newbyteorder() == res
else:
assert res_bs == res
assert res_bs.metadata == res.metadata
@pytest.mark.parametrize(["dtype1", "dtype2"],
[[np.dtype("V6"), np.dtype("V10")],
[np.dtype([("name1", "i8")]), np.dtype([("name2", "i8")])],
[np.dtype("i8,i8"), np.dtype("i4,i4")],
])
def test_invalid_void_promotion(self, dtype1, dtype2):
# Mainly test structured void promotion, which currently allows
# byte-swapping, but nothing else:
with pytest.raises(TypeError):
np.promote_types(dtype1, dtype2)
@pytest.mark.parametrize(["dtype1", "dtype2"],
[[np.dtype("V10"), np.dtype("V10")],
[np.dtype([("name1", "<i8")]), np.dtype([("name1", ">i8")])],
[np.dtype("i8,i8"), np.dtype("i8,>i8")],
])
def test_valid_void_promotion(self, dtype1, dtype2):
assert np.promote_types(dtype1, dtype2) is dtype1
def test_can_cast(self):
assert_(np.can_cast(np.int32, np.int64))
assert_(np.can_cast(np.float64, complex))
assert_(not np.can_cast(complex, float))
assert_(np.can_cast('i8', 'f8'))
assert_(not np.can_cast('i8', 'f4'))
assert_(np.can_cast('i4', 'S11'))
assert_(np.can_cast('i8', 'i8', 'no'))
assert_(not np.can_cast('<i8', '>i8', 'no'))
assert_(np.can_cast('<i8', '>i8', 'equiv'))
assert_(not np.can_cast('<i4', '>i8', 'equiv'))
assert_(np.can_cast('<i4', '>i8', 'safe'))
assert_(not np.can_cast('<i8', '>i4', 'safe'))
assert_(np.can_cast('<i8', '>i4', 'same_kind'))
assert_(not np.can_cast('<i8', '>u4', 'same_kind'))
assert_(np.can_cast('<i8', '>u4', 'unsafe'))
assert_(np.can_cast('bool', 'S5'))
assert_(not np.can_cast('bool', 'S4'))
assert_(np.can_cast('b', 'S4'))
assert_(not np.can_cast('b', 'S3'))
assert_(np.can_cast('u1', 'S3'))
assert_(not np.can_cast('u1', 'S2'))
assert_(np.can_cast('u2', 'S5'))
assert_(not np.can_cast('u2', 'S4'))
assert_(np.can_cast('u4', 'S10'))
assert_(not np.can_cast('u4', 'S9'))
assert_(np.can_cast('u8', 'S20'))
assert_(not np.can_cast('u8', 'S19'))
assert_(np.can_cast('i1', 'S4'))
assert_(not np.can_cast('i1', 'S3'))
assert_(np.can_cast('i2', 'S6'))
assert_(not np.can_cast('i2', 'S5'))
assert_(np.can_cast('i4', 'S11'))
assert_(not np.can_cast('i4', 'S10'))
assert_(np.can_cast('i8', 'S21'))
assert_(not np.can_cast('i8', 'S20'))
assert_(np.can_cast('bool', 'S5'))
assert_(not np.can_cast('bool', 'S4'))
assert_(np.can_cast('b', 'U4'))
assert_(not np.can_cast('b', 'U3'))
assert_(np.can_cast('u1', 'U3'))
assert_(not np.can_cast('u1', 'U2'))
assert_(np.can_cast('u2', 'U5'))
assert_(not np.can_cast('u2', 'U4'))
assert_(np.can_cast('u4', 'U10'))
assert_(not np.can_cast('u4', 'U9'))
assert_(np.can_cast('u8', 'U20'))
assert_(not np.can_cast('u8', 'U19'))
assert_(np.can_cast('i1', 'U4'))
assert_(not np.can_cast('i1', 'U3'))
assert_(np.can_cast('i2', 'U6'))
assert_(not np.can_cast('i2', 'U5'))
assert_(np.can_cast('i4', 'U11'))
assert_(not np.can_cast('i4', 'U10'))
assert_(np.can_cast('i8', 'U21'))
assert_(not np.can_cast('i8', 'U20'))
assert_raises(TypeError, np.can_cast, 'i4', None)
assert_raises(TypeError, np.can_cast, None, 'i4')
# Also test keyword arguments
assert_(np.can_cast(from_=np.int32, to=np.int64))
def test_can_cast_simple_to_structured(self):
# Non-structured can only be cast to structured in 'unsafe' mode.
assert_(not np.can_cast('i4', 'i4,i4'))
assert_(not np.can_cast('i4', 'i4,i2'))
assert_(np.can_cast('i4', 'i4,i4', casting='unsafe'))
assert_(np.can_cast('i4', 'i4,i2', casting='unsafe'))
# Even if there is just a single field which is OK.
assert_(not np.can_cast('i2', [('f1', 'i4')]))
assert_(not np.can_cast('i2', [('f1', 'i4')], casting='same_kind'))
assert_(np.can_cast('i2', [('f1', 'i4')], casting='unsafe'))
# It should be the same for recursive structured or subarrays.
assert_(not np.can_cast('i2', [('f1', 'i4,i4')]))
assert_(np.can_cast('i2', [('f1', 'i4,i4')], casting='unsafe'))
assert_(not np.can_cast('i2', [('f1', '(2,3)i4')]))
assert_(np.can_cast('i2', [('f1', '(2,3)i4')], casting='unsafe'))
def test_can_cast_structured_to_simple(self):
# Need unsafe casting for structured to simple.
assert_(not np.can_cast([('f1', 'i4')], 'i4'))
assert_(np.can_cast([('f1', 'i4')], 'i4', casting='unsafe'))
assert_(np.can_cast([('f1', 'i4')], 'i2', casting='unsafe'))
# Since it is unclear what is being cast, multiple fields to
# single should not work even for unsafe casting.
assert_(not np.can_cast('i4,i4', 'i4', casting='unsafe'))
# But a single field inside a single field is OK.
assert_(not np.can_cast([('f1', [('x', 'i4')])], 'i4'))
assert_(np.can_cast([('f1', [('x', 'i4')])], 'i4', casting='unsafe'))
# And a subarray is fine too - it will just take the first element
# (arguably not very consistently; might also take the first field).
assert_(not np.can_cast([('f0', '(3,)i4')], 'i4'))
assert_(np.can_cast([('f0', '(3,)i4')], 'i4', casting='unsafe'))
# But a structured subarray with multiple fields should fail.
assert_(not np.can_cast([('f0', ('i4,i4'), (2,))], 'i4',
casting='unsafe'))
def test_can_cast_values(self):
# gh-5917
for dt in np.sctypes['int'] + np.sctypes['uint']:
ii = np.iinfo(dt)
assert_(np.can_cast(ii.min, dt))
assert_(np.can_cast(ii.max, dt))
assert_(not np.can_cast(ii.min - 1, dt))
assert_(not np.can_cast(ii.max + 1, dt))
for dt in np.sctypes['float']:
fi = np.finfo(dt)
assert_(np.can_cast(fi.min, dt))
assert_(np.can_cast(fi.max, dt))
# Custom exception class to test exception propagation in fromiter
class NIterError(Exception):
pass
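# Illustrative sketch, not part of the test suite: np.fromiter consumes any
# iterable lazily, and passing `count` preallocates the output, which the
# length tests below rely on. The helper name is hypothetical.
def _demo_fromiter():
    gen = (x**2 for x in range(5))
    return np.fromiter(gen, dtype=np.int64, count=5)   # array([0, 1, 4, 9, 16])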
class TestFromiter:
def makegen(self):
return (x**2 for x in range(24))
def test_types(self):
ai32 = np.fromiter(self.makegen(), np.int32)
ai64 = np.fromiter(self.makegen(), np.int64)
af = np.fromiter(self.makegen(), float)
assert_(ai32.dtype == np.dtype(np.int32))
assert_(ai64.dtype == np.dtype(np.int64))
assert_(af.dtype == np.dtype(float))
def test_lengths(self):
expected = np.array(list(self.makegen()))
a = np.fromiter(self.makegen(), int)
a20 = np.fromiter(self.makegen(), int, 20)
assert_(len(a) == len(expected))
assert_(len(a20) == 20)
assert_raises(ValueError, np.fromiter,
self.makegen(), int, len(expected) + 10)
def test_values(self):
expected = np.array(list(self.makegen()))
a = np.fromiter(self.makegen(), int)
a20 = np.fromiter(self.makegen(), int, 20)
assert_(np.alltrue(a == expected, axis=0))
assert_(np.alltrue(a20 == expected[:20], axis=0))
def load_data(self, n, eindex):
# Utility method for the issue 2592 tests.
# Raise an exception at the desired index in the iterator.
for e in range(n):
if e == eindex:
raise NIterError('error at index %s' % eindex)
yield e
def test_2592(self):
# Test iteration exceptions are correctly raised.
count, eindex = 10, 5
assert_raises(NIterError, np.fromiter,
self.load_data(count, eindex), dtype=int, count=count)
def test_2592_edge(self):
# Test iter. exceptions, edge case (exception at end of iterator).
count = 10
eindex = count-1
assert_raises(NIterError, np.fromiter,
self.load_data(count, eindex), dtype=int, count=count)
class TestNonzero:
def test_nonzero_trivial(self):
assert_equal(np.count_nonzero(np.array([])), 0)
assert_equal(np.count_nonzero(np.array([], dtype='?')), 0)
assert_equal(np.nonzero(np.array([])), ([],))
assert_equal(np.count_nonzero(np.array([0])), 0)
assert_equal(np.count_nonzero(np.array([0], dtype='?')), 0)
assert_equal(np.nonzero(np.array([0])), ([],))
assert_equal(np.count_nonzero(np.array([1])), 1)
assert_equal(np.count_nonzero(np.array([1], dtype='?')), 1)
assert_equal(np.nonzero(np.array([1])), ([0],))
def test_nonzero_zerod(self):
assert_equal(np.count_nonzero(np.array(0)), 0)
assert_equal(np.count_nonzero(np.array(0, dtype='?')), 0)
with assert_warns(DeprecationWarning):
assert_equal(np.nonzero(np.array(0)), ([],))
assert_equal(np.count_nonzero(np.array(1)), 1)
assert_equal(np.count_nonzero(np.array(1, dtype='?')), 1)
with assert_warns(DeprecationWarning):
assert_equal(np.nonzero(np.array(1)), ([0],))
def test_nonzero_onedim(self):
x = np.array([1, 0, 2, -1, 0, 0, 8])
assert_equal(np.count_nonzero(x), 4)
assert_equal(np.count_nonzero(x), 4)
assert_equal(np.nonzero(x), ([0, 2, 3, 6],))
# x = np.array([(1, 2), (0, 0), (1, 1), (-1, 3), (0, 7)],
# dtype=[('a', 'i4'), ('b', 'i2')])
x = np.array([(1, 2, -5, -3), (0, 0, 2, 7), (1, 1, 0, 1), (-1, 3, 1, 0), (0, 7, 0, 4)],
dtype=[('a', 'i4'), ('b', 'i2'), ('c', 'i1'), ('d', 'i8')])
assert_equal(np.count_nonzero(x['a']), 3)
assert_equal(np.count_nonzero(x['b']), 4)
assert_equal(np.count_nonzero(x['c']), 3)
assert_equal(np.count_nonzero(x['d']), 4)
assert_equal(np.nonzero(x['a']), ([0, 2, 3],))
assert_equal(np.nonzero(x['b']), ([0, 2, 3, 4],))
def test_nonzero_twodim(self):
x = np.array([[0, 1, 0], [2, 0, 3]])
assert_equal(np.count_nonzero(x.astype('i1')), 3)
assert_equal(np.count_nonzero(x.astype('i2')), 3)
assert_equal(np.count_nonzero(x.astype('i4')), 3)
assert_equal(np.count_nonzero(x.astype('i8')), 3)
assert_equal(np.nonzero(x), ([0, 1, 1], [1, 0, 2]))
x = np.eye(3)
assert_equal(np.count_nonzero(x.astype('i1')), 3)
assert_equal(np.count_nonzero(x.astype('i2')), 3)
assert_equal(np.count_nonzero(x.astype('i4')), 3)
assert_equal(np.count_nonzero(x.astype('i8')), 3)
assert_equal(np.nonzero(x), ([0, 1, 2], [0, 1, 2]))
x = np.array([[(0, 1), (0, 0), (1, 11)],
[(1, 1), (1, 0), (0, 0)],
[(0, 0), (1, 5), (0, 1)]], dtype=[('a', 'f4'), ('b', 'u1')])
assert_equal(np.count_nonzero(x['a']), 4)
assert_equal(np.count_nonzero(x['b']), 5)
assert_equal(np.nonzero(x['a']), ([0, 1, 1, 2], [2, 0, 1, 1]))
assert_equal(np.nonzero(x['b']), ([0, 0, 1, 2, 2], [0, 2, 0, 1, 2]))
assert_(not x['a'].T.flags.aligned)
assert_equal(np.count_nonzero(x['a'].T), 4)
assert_equal(np.count_nonzero(x['b'].T), 5)
assert_equal(np.nonzero(x['a'].T), ([0, 1, 1, 2], [1, 1, 2, 0]))
assert_equal(np.nonzero(x['b'].T), ([0, 0, 1, 2, 2], [0, 1, 2, 0, 2]))
def test_sparse(self):
# test special sparse condition boolean code path
for i in range(20):
c = np.zeros(200, dtype=bool)
c[i::20] = True
assert_equal(np.nonzero(c)[0], np.arange(i, 200 + i, 20))
c = np.zeros(400, dtype=bool)
c[10 + i:20 + i] = True
c[20 + i*2] = True
assert_equal(np.nonzero(c)[0],
np.concatenate((np.arange(10 + i, 20 + i), [20 + i*2])))
def test_return_type(self):
class C(np.ndarray):
pass
for view in (C, np.ndarray):
for nd in range(1, 4):
shape = tuple(range(2, 2+nd))
x = np.arange(np.prod(shape)).reshape(shape).view(view)
for nzx in (np.nonzero(x), x.nonzero()):
for nzx_i in nzx:
assert_(type(nzx_i) is np.ndarray)
assert_(nzx_i.flags.writeable)
def test_count_nonzero_axis(self):
# Basic check of functionality
m = np.array([[0, 1, 7, 0, 0], [3, 0, 0, 2, 19]])
expected = np.array([1, 1, 1, 1, 1])
assert_equal(np.count_nonzero(m, axis=0), expected)
expected = np.array([2, 3])
assert_equal(np.count_nonzero(m, axis=1), expected)
assert_raises(ValueError, np.count_nonzero, m, axis=(1, 1))
assert_raises(TypeError, np.count_nonzero, m, axis='foo')
assert_raises(np.AxisError, np.count_nonzero, m, axis=3)
assert_raises(TypeError, np.count_nonzero,
m, axis=np.array([[1], [2]]))
def test_count_nonzero_axis_all_dtypes(self):
# More thorough test that the axis argument is respected
# for all dtypes and responds correctly when presented with
# either integer or tuple arguments for axis
msg = "Mismatch for dtype: %s"
def assert_equal_w_dt(a, b, err_msg):
assert_equal(a.dtype, b.dtype, err_msg=err_msg)
assert_equal(a, b, err_msg=err_msg)
for dt in np.typecodes['All']:
err_msg = msg % (np.dtype(dt).name,)
if dt != 'V':
if dt != 'M':
m = np.zeros((3, 3), dtype=dt)
n = np.ones(1, dtype=dt)
m[0, 0] = n[0]
m[1, 0] = n[0]
else: # np.zeros doesn't work for np.datetime64
m = np.array(['1970-01-01'] * 9)
m = m.reshape((3, 3))
m[0, 0] = '1970-01-12'
m[1, 0] = '1970-01-12'
m = m.astype(dt)
expected = np.array([2, 0, 0], dtype=np.intp)
assert_equal_w_dt(np.count_nonzero(m, axis=0),
expected, err_msg=err_msg)
expected = np.array([1, 1, 0], dtype=np.intp)
assert_equal_w_dt(np.count_nonzero(m, axis=1),
expected, err_msg=err_msg)
expected = np.array(2)
assert_equal(np.count_nonzero(m, axis=(0, 1)),
expected, err_msg=err_msg)
assert_equal(np.count_nonzero(m, axis=None),
expected, err_msg=err_msg)
assert_equal(np.count_nonzero(m),
expected, err_msg=err_msg)
if dt == 'V':
# There are no 'nonzero' objects for np.void, so the testing
# setup is slightly different for this dtype
m = np.array([np.void(1)] * 6).reshape((2, 3))
expected = np.array([0, 0, 0], dtype=np.intp)
assert_equal_w_dt(np.count_nonzero(m, axis=0),
expected, err_msg=err_msg)
expected = np.array([0, 0], dtype=np.intp)
assert_equal_w_dt(np.count_nonzero(m, axis=1),
expected, err_msg=err_msg)
expected = np.array(0)
assert_equal(np.count_nonzero(m, axis=(0, 1)),
expected, err_msg=err_msg)
assert_equal(np.count_nonzero(m, axis=None),
expected, err_msg=err_msg)
assert_equal(np.count_nonzero(m),
expected, err_msg=err_msg)
def test_count_nonzero_axis_consistent(self):
# Check that the axis behaviour for valid axes in
# non-special cases is consistent (and therefore
# correct) by checking it against an integer array
        # that is then cast to the generic object dtype
from itertools import combinations, permutations
axis = (0, 1, 2, 3)
size = (5, 5, 5, 5)
msg = "Mismatch for axis: %s"
rng = np.random.RandomState(1234)
m = rng.randint(-100, 100, size=size)
n = m.astype(object)
for length in range(len(axis)):
for combo in combinations(axis, length):
for perm in permutations(combo):
assert_equal(
np.count_nonzero(m, axis=perm),
np.count_nonzero(n, axis=perm),
err_msg=msg % (perm,))
def test_countnonzero_axis_empty(self):
a = np.array([[0, 0, 1], [1, 0, 1]])
assert_equal(np.count_nonzero(a, axis=()), a.astype(bool))
def test_countnonzero_keepdims(self):
a = np.array([[0, 0, 1, 0],
[0, 3, 5, 0],
[7, 9, 2, 0]])
assert_equal(np.count_nonzero(a, axis=0, keepdims=True),
[[1, 2, 3, 0]])
assert_equal(np.count_nonzero(a, axis=1, keepdims=True),
[[1], [2], [3]])
assert_equal(np.count_nonzero(a, keepdims=True),
[[6]])
def test_array_method(self):
# Tests that the array method
# call to nonzero works
m = np.array([[1, 0, 0], [4, 0, 6]])
tgt = [[0, 1, 1], [0, 0, 2]]
assert_equal(m.nonzero(), tgt)
def test_nonzero_invalid_object(self):
# gh-9295
a = np.array([np.array([1, 2]), 3], dtype=object)
assert_raises(ValueError, np.nonzero, a)
class BoolErrors:
def __bool__(self):
raise ValueError("Not allowed")
assert_raises(ValueError, np.nonzero, np.array([BoolErrors()]))
def test_nonzero_sideeffect_safety(self):
# gh-13631
class FalseThenTrue:
_val = False
def __bool__(self):
try:
return self._val
finally:
self._val = True
class TrueThenFalse:
_val = True
def __bool__(self):
try:
return self._val
finally:
self._val = False
# result grows on the second pass
a = np.array([True, FalseThenTrue()])
assert_raises(RuntimeError, np.nonzero, a)
a = np.array([[True], [FalseThenTrue()]])
assert_raises(RuntimeError, np.nonzero, a)
# result shrinks on the second pass
a = np.array([False, TrueThenFalse()])
assert_raises(RuntimeError, np.nonzero, a)
a = np.array([[False], [TrueThenFalse()]])
assert_raises(RuntimeError, np.nonzero, a)
def test_nonzero_exception_safe(self):
# gh-13930
class ThrowsAfter:
def __init__(self, iters):
self.iters_left = iters
def __bool__(self):
if self.iters_left == 0:
raise ValueError("called `iters` times")
self.iters_left -= 1
return True
"""
Test that a ValueError is raised instead of a SystemError
If the __bool__ function is called after the error state is set,
Python (cpython) will raise a SystemError.
"""
# assert that an exception in first pass is handled correctly
a = np.array([ThrowsAfter(5)]*10)
assert_raises(ValueError, np.nonzero, a)
# raise exception in second pass for 1-dimensional loop
a = np.array([ThrowsAfter(15)]*10)
assert_raises(ValueError, np.nonzero, a)
# raise exception in second pass for n-dimensional loop
a = np.array([[ThrowsAfter(15)]]*10)
assert_raises(ValueError, np.nonzero, a)
class TestIndex:
def test_boolean(self):
a = rand(3, 5, 8)
V = rand(5, 8)
g1 = randint(0, 5, size=15)
g2 = randint(0, 8, size=15)
V[g1, g2] = -V[g1, g2]
assert_((np.array([a[0][V > 0], a[1][V > 0], a[2][V > 0]]) == a[:, V > 0]).all())
def test_boolean_edgecase(self):
a = np.array([], dtype='int32')
b = np.array([], dtype='bool')
c = a[b]
assert_equal(c, [])
assert_equal(c.dtype, np.dtype('int32'))
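# Illustrative sketch, not part of the test suite: a boolean mask whose shape
# matches the trailing axes selects the same positions from every leading
# slice, which is what test_boolean above asserts. Hypothetical helper.
def _demo_boolean_trailing_axes():
    a = np.arange(24).reshape(2, 3, 4)
    mask = a[0] % 2 == 0       # shape (3, 4), six True entries
    return a[:, mask].shape    # (2, 6)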
class TestBinaryRepr:
def test_zero(self):
assert_equal(np.binary_repr(0), '0')
def test_positive(self):
assert_equal(np.binary_repr(10), '1010')
assert_equal(np.binary_repr(12522),
'11000011101010')
assert_equal(np.binary_repr(10736848),
'101000111101010011010000')
def test_negative(self):
assert_equal(np.binary_repr(-1), '-1')
assert_equal(np.binary_repr(-10), '-1010')
assert_equal(np.binary_repr(-12522),
'-11000011101010')
assert_equal(np.binary_repr(-10736848),
'-101000111101010011010000')
def test_sufficient_width(self):
assert_equal(np.binary_repr(0, width=5), '00000')
assert_equal(np.binary_repr(10, width=7), '0001010')
assert_equal(np.binary_repr(-5, width=7), '1111011')
def test_neg_width_boundaries(self):
# see gh-8670
# Ensure that the example in the issue does not
# break before proceeding to a more thorough test.
assert_equal(np.binary_repr(-128, width=8), '10000000')
for width in range(1, 11):
num = -2**(width - 1)
exp = '1' + (width - 1) * '0'
assert_equal(np.binary_repr(num, width=width), exp)
def test_large_neg_int64(self):
# See gh-14289.
assert_equal(np.binary_repr(np.int64(-2**62), width=64),
'11' + '0'*62)
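# Illustrative sketch, not part of the test suite: with a fixed `width`,
# np.binary_repr returns the two's complement bit pattern, so the most
# negative value representable in `width` bits is '1' followed by zeros,
# matching the boundary tests above. Hypothetical helper.
def _demo_twos_complement_repr():
    return [np.binary_repr(-2**(w - 1), width=w) for w in (4, 8)]
    # -> ['1000', '10000000']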
class TestBaseRepr:
def test_base3(self):
assert_equal(np.base_repr(3**5, 3), '100000')
def test_positive(self):
assert_equal(np.base_repr(12, 10), '12')
assert_equal(np.base_repr(12, 10, 4), '000012')
assert_equal(np.base_repr(12, 4), '30')
assert_equal(np.base_repr(3731624803700888, 36), '10QR0ROFCEW')
def test_negative(self):
assert_equal(np.base_repr(-12, 10), '-12')
assert_equal(np.base_repr(-12, 10, 4), '-000012')
assert_equal(np.base_repr(-12, 4), '-30')
def test_base_range(self):
with assert_raises(ValueError):
np.base_repr(1, 1)
with assert_raises(ValueError):
np.base_repr(1, 37)
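# Illustrative sketch, not part of the test suite: np.base_repr supports
# bases 2..36 (digits, then uppercase letters) with optional left zero
# padding; the range test above rejects anything outside 2..36.
# Hypothetical helper.
def _demo_base_repr():
    return np.base_repr(255, base=16), np.base_repr(5, base=2, padding=4)
    # -> ('FF', '0000101')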
class TestArrayComparisons:
def test_array_equal(self):
res = np.array_equal(np.array([1, 2]), np.array([1, 2]))
assert_(res)
assert_(type(res) is bool)
res = np.array_equal(np.array([1, 2]), np.array([1, 2, 3]))
assert_(not res)
assert_(type(res) is bool)
res = np.array_equal(np.array([1, 2]), np.array([3, 4]))
assert_(not res)
assert_(type(res) is bool)
res = np.array_equal(np.array([1, 2]), np.array([1, 3]))
assert_(not res)
assert_(type(res) is bool)
res = np.array_equal(np.array(['a'], dtype='S1'), np.array(['a'], dtype='S1'))
assert_(res)
assert_(type(res) is bool)
res = np.array_equal(np.array([('a', 1)], dtype='S1,u4'),
np.array([('a', 1)], dtype='S1,u4'))
assert_(res)
assert_(type(res) is bool)
def test_array_equal_equal_nan(self):
# Test array_equal with equal_nan kwarg
a1 = np.array([1, 2, np.nan])
a2 = np.array([1, np.nan, 2])
a3 = np.array([1, 2, np.inf])
# equal_nan=False by default
assert_(not np.array_equal(a1, a1))
assert_(np.array_equal(a1, a1, equal_nan=True))
assert_(not np.array_equal(a1, a2, equal_nan=True))
# nan's not conflated with inf's
assert_(not np.array_equal(a1, a3, equal_nan=True))
# 0-D arrays
a = np.array(np.nan)
assert_(not np.array_equal(a, a))
assert_(np.array_equal(a, a, equal_nan=True))
# Non-float dtype - equal_nan should have no effect
a = np.array([1, 2, 3], dtype=int)
assert_(np.array_equal(a, a))
assert_(np.array_equal(a, a, equal_nan=True))
# Multi-dimensional array
a = np.array([[0, 1], [np.nan, 1]])
assert_(not np.array_equal(a, a))
assert_(np.array_equal(a, a, equal_nan=True))
# Complex values
a, b = [np.array([1 + 1j])]*2
a.real, b.imag = np.nan, np.nan
assert_(not np.array_equal(a, b, equal_nan=False))
assert_(np.array_equal(a, b, equal_nan=True))
def test_none_compares_elementwise(self):
a = np.array([None, 1, None], dtype=object)
assert_equal(a == None, [True, False, True])
assert_equal(a != None, [False, True, False])
a = np.ones(3)
assert_equal(a == None, [False, False, False])
assert_equal(a != None, [True, True, True])
def test_array_equiv(self):
res = np.array_equiv(np.array([1, 2]), np.array([1, 2]))
assert_(res)
assert_(type(res) is bool)
res = np.array_equiv(np.array([1, 2]), np.array([1, 2, 3]))
assert_(not res)
assert_(type(res) is bool)
res = np.array_equiv(np.array([1, 2]), np.array([3, 4]))
assert_(not res)
assert_(type(res) is bool)
res = np.array_equiv(np.array([1, 2]), np.array([1, 3]))
assert_(not res)
assert_(type(res) is bool)
res = np.array_equiv(np.array([1, 1]), np.array([1]))
assert_(res)
assert_(type(res) is bool)
res = np.array_equiv(np.array([1, 1]), np.array([[1], [1]]))
assert_(res)
assert_(type(res) is bool)
res = np.array_equiv(np.array([1, 2]), np.array([2]))
assert_(not res)
assert_(type(res) is bool)
res = np.array_equiv(np.array([1, 2]), np.array([[1], [2]]))
assert_(not res)
assert_(type(res) is bool)
res = np.array_equiv(np.array([1, 2]), np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]]))
assert_(not res)
assert_(type(res) is bool)
def assert_array_strict_equal(x, y):
assert_array_equal(x, y)
# Check flags, 32 bit arches typically don't provide 16 byte alignment
if ((x.dtype.alignment <= 8 or
np.intp().dtype.itemsize != 4) and
sys.platform != 'win32'):
assert_(x.flags == y.flags)
else:
assert_(x.flags.owndata == y.flags.owndata)
assert_(x.flags.writeable == y.flags.writeable)
assert_(x.flags.c_contiguous == y.flags.c_contiguous)
assert_(x.flags.f_contiguous == y.flags.f_contiguous)
assert_(x.flags.writebackifcopy == y.flags.writebackifcopy)
# check endianness
assert_(x.dtype.isnative == y.dtype.isnative)
class TestClip:
def setup(self):
self.nr = 5
self.nc = 3
def fastclip(self, a, m, M, out=None, casting=None):
if out is None:
if casting is None:
return a.clip(m, M)
else:
return a.clip(m, M, casting=casting)
else:
if casting is None:
return a.clip(m, M, out)
else:
return a.clip(m, M, out, casting=casting)
def clip(self, a, m, M, out=None):
# use slow-clip
selector = np.less(a, m) + 2*np.greater(a, M)
return selector.choose((a, m, M), out=out)
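    # Illustrative sketch, not part of the test suite: how the choose-based
    # reference clip above works. The selector is 0 where `a` is in range,
    # 1 where a < m, and 2 where a > M, picking from (a, m, M) respectively.
    # Hypothetical helper.
    def _demo_choose_clip(self):
        a = np.array([-2.0, 0.3, 9.0])
        selector = np.less(a, 0.0) + 2 * np.greater(a, 1.0)
        return selector.choose((a, 0.0, 1.0))   # -> array([0. , 0.3, 1. ])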
# Handy functions
def _generate_data(self, n, m):
return randn(n, m)
def _generate_data_complex(self, n, m):
return randn(n, m) + 1.j * rand(n, m)
def _generate_flt_data(self, n, m):
return (randn(n, m)).astype(np.float32)
def _neg_byteorder(self, a):
a = np.asarray(a)
if sys.byteorder == 'little':
a = a.astype(a.dtype.newbyteorder('>'))
else:
a = a.astype(a.dtype.newbyteorder('<'))
return a
def _generate_non_native_data(self, n, m):
data = randn(n, m)
data = self._neg_byteorder(data)
assert_(not data.dtype.isnative)
return data
def _generate_int_data(self, n, m):
return (10 * rand(n, m)).astype(np.int64)
def _generate_int32_data(self, n, m):
return (10 * rand(n, m)).astype(np.int32)
# Now the real test cases
@pytest.mark.parametrize("dtype", '?bhilqpBHILQPefdgFDGO')
def test_ones_pathological(self, dtype):
# for preservation of behavior described in
# gh-12519; amin > amax behavior may still change
# in the future
arr = np.ones(10, dtype=dtype)
expected = np.zeros(10, dtype=dtype)
actual = np.clip(arr, 1, 0)
if dtype == 'O':
assert actual.tolist() == expected.tolist()
else:
assert_equal(actual, expected)
def test_simple_double(self):
# Test native double input with scalar min/max.
a = self._generate_data(self.nr, self.nc)
m = 0.1
M = 0.6
ac = self.fastclip(a, m, M)
act = self.clip(a, m, M)
assert_array_strict_equal(ac, act)
def test_simple_int(self):
# Test native int input with scalar min/max.
a = self._generate_int_data(self.nr, self.nc)
a = a.astype(int)
m = -2
M = 4
ac = self.fastclip(a, m, M)
act = self.clip(a, m, M)
assert_array_strict_equal(ac, act)
def test_array_double(self):
# Test native double input with array min/max.
a = self._generate_data(self.nr, self.nc)
m = np.zeros(a.shape)
M = m + 0.5
ac = self.fastclip(a, m, M)
act = self.clip(a, m, M)
assert_array_strict_equal(ac, act)
def test_simple_nonnative(self):
# Test non native double input with scalar min/max.
# Test native double input with non native double scalar min/max.
a = self._generate_non_native_data(self.nr, self.nc)
m = -0.5
M = 0.6
ac = self.fastclip(a, m, M)
act = self.clip(a, m, M)
assert_array_equal(ac, act)
# Test native double input with non native double scalar min/max.
a = self._generate_data(self.nr, self.nc)
m = -0.5
M = self._neg_byteorder(0.6)
assert_(not M.dtype.isnative)
ac = self.fastclip(a, m, M)
act = self.clip(a, m, M)
assert_array_equal(ac, act)
def test_simple_complex(self):
# Test native complex input with native double scalar min/max.
# Test native input with complex double scalar min/max.
a = 3 * self._generate_data_complex(self.nr, self.nc)
m = -0.5
M = 1.
ac = self.fastclip(a, m, M)
act = self.clip(a, m, M)
assert_array_strict_equal(ac, act)
# Test native input with complex double scalar min/max.
a = 3 * self._generate_data(self.nr, self.nc)
m = -0.5 + 1.j
M = 1. + 2.j
ac = self.fastclip(a, m, M)
act = self.clip(a, m, M)
assert_array_strict_equal(ac, act)
def test_clip_complex(self):
# Address Issue gh-5354 for clipping complex arrays
# Test native complex input without explicit min/max
        # i.e., either min=None or max=None
a = np.ones(10, dtype=complex)
m = a.min()
M = a.max()
am = self.fastclip(a, m, None)
aM = self.fastclip(a, None, M)
assert_array_strict_equal(am, a)
assert_array_strict_equal(aM, a)
def test_clip_non_contig(self):
# Test clip for non contiguous native input and native scalar min/max.
a = self._generate_data(self.nr * 2, self.nc * 3)
a = a[::2, ::3]
assert_(not a.flags['F_CONTIGUOUS'])
assert_(not a.flags['C_CONTIGUOUS'])
ac = self.fastclip(a, -1.6, 1.7)
act = self.clip(a, -1.6, 1.7)
assert_array_strict_equal(ac, act)
def test_simple_out(self):
# Test native double input with scalar min/max.
a = self._generate_data(self.nr, self.nc)
m = -0.5
M = 0.6
ac = np.zeros(a.shape)
act = np.zeros(a.shape)
self.fastclip(a, m, M, ac)
self.clip(a, m, M, act)
assert_array_strict_equal(ac, act)
@pytest.mark.parametrize("casting", [None, "unsafe"])
def test_simple_int32_inout(self, casting):
# Test native int32 input with double min/max and int32 out.
a = self._generate_int32_data(self.nr, self.nc)
m = np.float64(0)
M = np.float64(2)
ac = np.zeros(a.shape, dtype=np.int32)
act = ac.copy()
if casting is None:
with assert_warns(DeprecationWarning):
# NumPy 1.17.0, 2018-02-24 - casting is unsafe
self.fastclip(a, m, M, ac, casting=casting)
else:
# explicitly passing "unsafe" will silence warning
self.fastclip(a, m, M, ac, casting=casting)
self.clip(a, m, M, act)
assert_array_strict_equal(ac, act)
def test_simple_int64_out(self):
# Test native int32 input with int32 scalar min/max and int64 out.
a = self._generate_int32_data(self.nr, self.nc)
m = np.int32(-1)
M = np.int32(1)
ac = np.zeros(a.shape, dtype=np.int64)
act = ac.copy()
self.fastclip(a, m, M, ac)
self.clip(a, m, M, act)
assert_array_strict_equal(ac, act)
def test_simple_int64_inout(self):
# Test native int32 input with double array min/max and int32 out.
a = self._generate_int32_data(self.nr, self.nc)
m = np.zeros(a.shape, np.float64)
M = np.float64(1)
ac = np.zeros(a.shape, dtype=np.int32)
act = ac.copy()
with assert_warns(DeprecationWarning):
# NumPy 1.17.0, 2018-02-24 - casting is unsafe
self.fastclip(a, m, M, ac)
self.clip(a, m, M, act)
assert_array_strict_equal(ac, act)
def test_simple_int32_out(self):
# Test native double input with scalar min/max and int out.
a = self._generate_data(self.nr, self.nc)
m = -1.0
M = 2.0
ac = np.zeros(a.shape, dtype=np.int32)
act = ac.copy()
with assert_warns(DeprecationWarning):
# NumPy 1.17.0, 2018-02-24 - casting is unsafe
self.fastclip(a, m, M, ac)
self.clip(a, m, M, act)
assert_array_strict_equal(ac, act)
def test_simple_inplace_01(self):
# Test native double input with array min/max in-place.
a = self._generate_data(self.nr, self.nc)
ac = a.copy()
m = np.zeros(a.shape)
M = 1.0
self.fastclip(a, m, M, a)
self.clip(a, m, M, ac)
assert_array_strict_equal(a, ac)
def test_simple_inplace_02(self):
# Test native double input with scalar min/max in-place.
a = self._generate_data(self.nr, self.nc)
ac = a.copy()
m = -0.5
M = 0.6
self.fastclip(a, m, M, a)
self.clip(ac, m, M, ac)
assert_array_strict_equal(a, ac)
def test_noncontig_inplace(self):
# Test non contiguous double input with double scalar min/max in-place.
a = self._generate_data(self.nr * 2, self.nc * 3)
a = a[::2, ::3]
assert_(not a.flags['F_CONTIGUOUS'])
assert_(not a.flags['C_CONTIGUOUS'])
ac = a.copy()
m = -0.5
M = 0.6
self.fastclip(a, m, M, a)
self.clip(ac, m, M, ac)
assert_array_equal(a, ac)
def test_type_cast_01(self):
# Test native double input with scalar min/max.
a = self._generate_data(self.nr, self.nc)
m = -0.5
M = 0.6
ac = self.fastclip(a, m, M)
act = self.clip(a, m, M)
assert_array_strict_equal(ac, act)
def test_type_cast_02(self):
# Test native int32 input with int32 scalar min/max.
a = self._generate_int_data(self.nr, self.nc)
a = a.astype(np.int32)
m = -2
M = 4
ac = self.fastclip(a, m, M)
act = self.clip(a, m, M)
assert_array_strict_equal(ac, act)
def test_type_cast_03(self):
# Test native int32 input with float64 scalar min/max.
a = self._generate_int32_data(self.nr, self.nc)
m = -2
M = 4
ac = self.fastclip(a, np.float64(m), np.float64(M))
act = self.clip(a, np.float64(m), np.float64(M))
assert_array_strict_equal(ac, act)
def test_type_cast_04(self):
# Test native int32 input with float32 scalar min/max.
a = self._generate_int32_data(self.nr, self.nc)
m = np.float32(-2)
M = np.float32(4)
act = self.fastclip(a, m, M)
ac = self.clip(a, m, M)
assert_array_strict_equal(ac, act)
def test_type_cast_05(self):
# Test native int32 with double arrays min/max.
a = self._generate_int_data(self.nr, self.nc)
m = -0.5
M = 1.
ac = self.fastclip(a, m * np.zeros(a.shape), M)
act = self.clip(a, m * np.zeros(a.shape), M)
assert_array_strict_equal(ac, act)
def test_type_cast_06(self):
# Test native with NON native scalar min/max.
a = self._generate_data(self.nr, self.nc)
m = 0.5
m_s = self._neg_byteorder(m)
M = 1.
act = self.clip(a, m_s, M)
ac = self.fastclip(a, m_s, M)
assert_array_strict_equal(ac, act)
def test_type_cast_07(self):
# Test NON native with native array min/max.
a = self._generate_data(self.nr, self.nc)
m = -0.5 * np.ones(a.shape)
M = 1.
a_s = self._neg_byteorder(a)
assert_(not a_s.dtype.isnative)
act = a_s.clip(m, M)
ac = self.fastclip(a_s, m, M)
assert_array_strict_equal(ac, act)
def test_type_cast_08(self):
# Test NON native with native scalar min/max.
a = self._generate_data(self.nr, self.nc)
m = -0.5
M = 1.
a_s = self._neg_byteorder(a)
assert_(not a_s.dtype.isnative)
ac = self.fastclip(a_s, m, M)
act = a_s.clip(m, M)
assert_array_strict_equal(ac, act)
def test_type_cast_09(self):
# Test native with NON native array min/max.
a = self._generate_data(self.nr, self.nc)
m = -0.5 * np.ones(a.shape)
M = 1.
m_s = self._neg_byteorder(m)
assert_(not m_s.dtype.isnative)
ac = self.fastclip(a, m_s, M)
act = self.clip(a, m_s, M)
assert_array_strict_equal(ac, act)
def test_type_cast_10(self):
# Test native int32 with float min/max and float out for output argument.
a = self._generate_int_data(self.nr, self.nc)
b = np.zeros(a.shape, dtype=np.float32)
m = np.float32(-0.5)
M = np.float32(1)
act = self.clip(a, m, M, out=b)
ac = self.fastclip(a, m, M, out=b)
assert_array_strict_equal(ac, act)
def test_type_cast_11(self):
# Test non native with native scalar, min/max, out non native
a = self._generate_non_native_data(self.nr, self.nc)
b = a.copy()
b = b.astype(b.dtype.newbyteorder('>'))
bt = b.copy()
m = -0.5
M = 1.
self.fastclip(a, m, M, out=b)
self.clip(a, m, M, out=bt)
assert_array_strict_equal(b, bt)
def test_type_cast_12(self):
# Test native int32 input and min/max and float out
a = self._generate_int_data(self.nr, self.nc)
b = np.zeros(a.shape, dtype=np.float32)
m = np.int32(0)
M = np.int32(1)
act = self.clip(a, m, M, out=b)
ac = self.fastclip(a, m, M, out=b)
assert_array_strict_equal(ac, act)
def test_clip_with_out_simple(self):
# Test native double input with scalar min/max
a = self._generate_data(self.nr, self.nc)
m = -0.5
M = 0.6
ac = np.zeros(a.shape)
act = np.zeros(a.shape)
self.fastclip(a, m, M, ac)
self.clip(a, m, M, act)
assert_array_strict_equal(ac, act)
def test_clip_with_out_simple2(self):
# Test native int32 input with double min/max and int32 out
a = self._generate_int32_data(self.nr, self.nc)
m = np.float64(0)
M = np.float64(2)
ac = np.zeros(a.shape, dtype=np.int32)
act = ac.copy()
with assert_warns(DeprecationWarning):
# NumPy 1.17.0, 2018-02-24 - casting is unsafe
self.fastclip(a, m, M, ac)
self.clip(a, m, M, act)
assert_array_strict_equal(ac, act)
def test_clip_with_out_simple_int32(self):
# Test native int32 input with int32 scalar min/max and int64 out
a = self._generate_int32_data(self.nr, self.nc)
m = np.int32(-1)
M = np.int32(1)
ac = np.zeros(a.shape, dtype=np.int64)
act = ac.copy()
self.fastclip(a, m, M, ac)
self.clip(a, m, M, act)
assert_array_strict_equal(ac, act)
def test_clip_with_out_array_int32(self):
# Test native int32 input with double array min/max and int32 out
a = self._generate_int32_data(self.nr, self.nc)
m = np.zeros(a.shape, np.float64)
M = np.float64(1)
ac = np.zeros(a.shape, dtype=np.int32)
act = ac.copy()
with assert_warns(DeprecationWarning):
# NumPy 1.17.0, 2018-02-24 - casting is unsafe
self.fastclip(a, m, M, ac)
self.clip(a, m, M, act)
assert_array_strict_equal(ac, act)
def test_clip_with_out_array_outint32(self):
# Test native double input with scalar min/max and int out
a = self._generate_data(self.nr, self.nc)
m = -1.0
M = 2.0
ac = np.zeros(a.shape, dtype=np.int32)
act = ac.copy()
with assert_warns(DeprecationWarning):
# NumPy 1.17.0, 2018-02-24 - casting is unsafe
self.fastclip(a, m, M, ac)
self.clip(a, m, M, act)
assert_array_strict_equal(ac, act)
def test_clip_with_out_transposed(self):
# Test that the out argument works when transposed
a = np.arange(16).reshape(4, 4)
out = np.empty_like(a).T
a.clip(4, 10, out=out)
expected = self.clip(a, 4, 10)
assert_array_equal(out, expected)
def test_clip_with_out_memory_overlap(self):
# Test that the out argument works when it has memory overlap
a = np.arange(16).reshape(4, 4)
ac = a.copy()
a[:-1].clip(4, 10, out=a[1:])
expected = self.clip(ac[:-1], 4, 10)
assert_array_equal(a[1:], expected)
def test_clip_inplace_array(self):
# Test native double input with array min/max
a = self._generate_data(self.nr, self.nc)
ac = a.copy()
m = np.zeros(a.shape)
M = 1.0
self.fastclip(a, m, M, a)
self.clip(a, m, M, ac)
assert_array_strict_equal(a, ac)
def test_clip_inplace_simple(self):
# Test native double input with scalar min/max
a = self._generate_data(self.nr, self.nc)
ac = a.copy()
m = -0.5
M = 0.6
self.fastclip(a, m, M, a)
self.clip(a, m, M, ac)
assert_array_strict_equal(a, ac)
def test_clip_func_takes_out(self):
# Ensure that the clip() function takes an out=argument.
a = self._generate_data(self.nr, self.nc)
ac = a.copy()
m = -0.5
M = 0.6
a2 = np.clip(a, m, M, out=a)
self.clip(a, m, M, ac)
assert_array_strict_equal(a2, ac)
assert_(a2 is a)
def test_clip_nan(self):
d = np.arange(7.)
with assert_warns(DeprecationWarning):
assert_equal(d.clip(min=np.nan), d)
with assert_warns(DeprecationWarning):
assert_equal(d.clip(max=np.nan), d)
with assert_warns(DeprecationWarning):
assert_equal(d.clip(min=np.nan, max=np.nan), d)
with assert_warns(DeprecationWarning):
assert_equal(d.clip(min=-2, max=np.nan), d)
with assert_warns(DeprecationWarning):
assert_equal(d.clip(min=np.nan, max=10), d)
def test_object_clip(self):
a = np.arange(10, dtype=object)
actual = np.clip(a, 1, 5)
expected = np.array([1, 1, 2, 3, 4, 5, 5, 5, 5, 5])
assert actual.tolist() == expected.tolist()
def test_clip_all_none(self):
a = np.arange(10, dtype=object)
with assert_raises_regex(ValueError, 'max or min'):
np.clip(a, None, None)
def test_clip_invalid_casting(self):
a = np.arange(10, dtype=object)
with assert_raises_regex(ValueError,
'casting must be one of'):
self.fastclip(a, 1, 8, casting="garbage")
@pytest.mark.parametrize("amin, amax", [
# two scalars
(1, 0),
# mix scalar and array
(1, np.zeros(10)),
# two arrays
(np.ones(10), np.zeros(10)),
])
def test_clip_value_min_max_flip(self, amin, amax):
a = np.arange(10, dtype=np.int64)
# requirement from ufunc_docstrings.py
expected = np.minimum(np.maximum(a, amin), amax)
actual = np.clip(a, amin, amax)
assert_equal(actual, expected)
@pytest.mark.parametrize("arr, amin, amax, exp", [
# for a bug in npy_ObjectClip, based on a
# case produced by hypothesis
(np.zeros(10, dtype=np.int64),
0,
-2**64+1,
np.full(10, -2**64+1, dtype=object)),
# for bugs in NPY_TIMEDELTA_MAX, based on a case
# produced by hypothesis
(np.zeros(10, dtype='m8') - 1,
0,
0,
np.zeros(10, dtype='m8')),
])
def test_clip_problem_cases(self, arr, amin, amax, exp):
actual = np.clip(arr, amin, amax)
assert_equal(actual, exp)
@pytest.mark.xfail(reason="no scalar nan propagation yet",
raises=AssertionError,
strict=True)
@pytest.mark.parametrize("arr, amin, amax", [
# problematic scalar nan case from hypothesis
(np.zeros(10, dtype=np.int64),
np.array(np.nan),
np.zeros(10, dtype=np.int32)),
])
@pytest.mark.filterwarnings("ignore::DeprecationWarning")
def test_clip_scalar_nan_propagation(self, arr, amin, amax):
# enforcement of scalar nan propagation for comparisons
# called through clip()
expected = np.minimum(np.maximum(arr, amin), amax)
actual = np.clip(arr, amin, amax)
assert_equal(actual, expected)
@pytest.mark.xfail(reason="propagation doesn't match spec")
@pytest.mark.parametrize("arr, amin, amax", [
(np.array([1] * 10, dtype='m8'),
np.timedelta64('NaT'),
np.zeros(10, dtype=np.int32)),
])
@pytest.mark.filterwarnings("ignore::DeprecationWarning")
def test_NaT_propagation(self, arr, amin, amax):
# NOTE: the expected function spec doesn't
# propagate NaT, but clip() now does
expected = np.minimum(np.maximum(arr, amin), amax)
actual = np.clip(arr, amin, amax)
assert_equal(actual, expected)
@given(data=st.data(), shape=hynp.array_shapes())
def test_clip_property(self, data, shape):
"""A property-based test using Hypothesis.
This aims for maximum generality: it could in principle generate *any*
valid inputs to np.clip, and in practice generates much more varied
inputs than human testers come up with.
Because many of the inputs have tricky dependencies - compatible dtypes
        and mutually-broadcastable shapes - we use the `st.data()` strategy to
        draw values *inside* the test function, from strategies we construct
        based on previous values. An alternative would be to define a custom
        strategy with `@st.composite`, but until the code is duplicated
        elsewhere, keeping it inline is fine.
That accounts for most of the function; the actual test is just three
lines to calculate and compare actual vs expected results!
"""
# Our base array and bounds should not need to be of the same type as
# long as they are all compatible - so we allow any int or float type.
dtype_strategy = hynp.integer_dtypes() | hynp.floating_dtypes()
# The following line is a total hack to disable the varied-dtypes
# component of this test, because result != expected if dtypes can vary.
dtype_strategy = st.just(data.draw(dtype_strategy))
# Generate an arbitrary array of the chosen shape and dtype
# This is the value that we clip.
arr = data.draw(hynp.arrays(dtype=dtype_strategy, shape=shape))
# Generate shapes for the bounds which can be broadcast with each other
# and with the base shape. Below, we might decide to use scalar bounds,
# but it's clearer to generate these shapes unconditionally in advance.
in_shapes, result_shape = data.draw(
hynp.mutually_broadcastable_shapes(
num_shapes=2,
base_shape=shape,
# Commenting out the min_dims line allows zero-dimensional arrays,
# and zero-dimensional arrays containing NaN make the test fail.
min_dims=1
)
)
amin = data.draw(
dtype_strategy.flatmap(hynp.from_dtype)
| hynp.arrays(dtype=dtype_strategy, shape=in_shapes[0])
)
amax = data.draw(
dtype_strategy.flatmap(hynp.from_dtype)
| hynp.arrays(dtype=dtype_strategy, shape=in_shapes[1])
)
# If we allow either bound to be a scalar `nan`, the test will fail -
# so we just "assume" that away (if it is, this raises a special
# exception and Hypothesis will try again with different inputs)
assume(not np.isscalar(amin) or not np.isnan(amin))
assume(not np.isscalar(amax) or not np.isnan(amax))
# Then calculate our result and expected result and check that they're
# equal! See gh-12519 for discussion deciding on this property.
result = np.clip(arr, amin, amax)
expected = np.minimum(amax, np.maximum(arr, amin))
assert_array_equal(result, expected)
class TestAllclose:
rtol = 1e-5
atol = 1e-8
def setup(self):
self.olderr = np.seterr(invalid='ignore')
def teardown(self):
np.seterr(**self.olderr)
def tst_allclose(self, x, y):
assert_(np.allclose(x, y), "%s and %s not close" % (x, y))
def tst_not_allclose(self, x, y):
assert_(not np.allclose(x, y), "%s and %s shouldn't be close" % (x, y))
def test_ip_allclose(self):
# Parametric test factory.
arr = np.array([100, 1000])
aran = np.arange(125).reshape((5, 5, 5))
atol = self.atol
rtol = self.rtol
data = [([1, 0], [1, 0]),
([atol], [0]),
([1], [1+rtol+atol]),
(arr, arr + arr*rtol),
(arr, arr + arr*rtol + atol*2),
(aran, aran + aran*rtol),
(np.inf, np.inf),
(np.inf, [np.inf])]
for (x, y) in data:
self.tst_allclose(x, y)
def test_ip_not_allclose(self):
# Parametric test factory.
aran = np.arange(125).reshape((5, 5, 5))
atol = self.atol
rtol = self.rtol
data = [([np.inf, 0], [1, np.inf]),
([np.inf, 0], [1, 0]),
([np.inf, np.inf], [1, np.inf]),
([np.inf, np.inf], [1, 0]),
([-np.inf, 0], [np.inf, 0]),
([np.nan, 0], [np.nan, 0]),
([atol*2], [0]),
([1], [1+rtol+atol*2]),
(aran, aran + aran*atol + atol*2),
(np.array([np.inf, 1]), np.array([0, np.inf]))]
for (x, y) in data:
self.tst_not_allclose(x, y)
def test_no_parameter_modification(self):
x = np.array([np.inf, 1])
y = np.array([0, np.inf])
np.allclose(x, y)
assert_array_equal(x, np.array([np.inf, 1]))
assert_array_equal(y, np.array([0, np.inf]))
def test_min_int(self):
# Could make problems because of abs(min_int) == min_int
min_int = np.iinfo(np.int_).min
a = np.array([min_int], dtype=np.int_)
assert_(np.allclose(a, a))
def test_equalnan(self):
x = np.array([1.0, np.nan])
assert_(np.allclose(x, x, equal_nan=True))
def test_return_class_is_ndarray(self):
# Issue gh-6475
# Check that allclose does not preserve subtypes
class Foo(np.ndarray):
def __new__(cls, *args, **kwargs):
return np.array(*args, **kwargs).view(cls)
a = Foo([1])
assert_(type(np.allclose(a, a)) is bool)
class TestIsclose:
rtol = 1e-5
atol = 1e-8
def setup(self):
atol = self.atol
rtol = self.rtol
arr = np.array([100, 1000])
aran = np.arange(125).reshape((5, 5, 5))
self.all_close_tests = [
([1, 0], [1, 0]),
([atol], [0]),
([1], [1 + rtol + atol]),
(arr, arr + arr*rtol),
(arr, arr + arr*rtol + atol),
(aran, aran + aran*rtol),
(np.inf, np.inf),
(np.inf, [np.inf]),
([np.inf, -np.inf], [np.inf, -np.inf]),
]
self.none_close_tests = [
([np.inf, 0], [1, np.inf]),
([np.inf, -np.inf], [1, 0]),
([np.inf, np.inf], [1, -np.inf]),
([np.inf, np.inf], [1, 0]),
([np.nan, 0], [np.nan, -np.inf]),
([atol*2], [0]),
([1], [1 + rtol + atol*2]),
(aran, aran + rtol*1.1*aran + atol*1.1),
(np.array([np.inf, 1]), np.array([0, np.inf])),
]
self.some_close_tests = [
([np.inf, 0], [np.inf, atol*2]),
([atol, 1, 1e6*(1 + 2*rtol) + atol], [0, np.nan, 1e6]),
(np.arange(3), [0, 1, 2.1]),
(np.nan, [np.nan, np.nan, np.nan]),
([0], [atol, np.inf, -np.inf, np.nan]),
(0, [atol, np.inf, -np.inf, np.nan]),
]
self.some_close_results = [
[True, False],
[True, False, False],
[True, True, False],
[False, False, False],
[True, False, False, False],
[True, False, False, False],
]
def test_ip_isclose(self):
self.setup()
tests = self.some_close_tests
results = self.some_close_results
for (x, y), result in zip(tests, results):
assert_array_equal(np.isclose(x, y), result)
def tst_all_isclose(self, x, y):
assert_(np.all(np.isclose(x, y)), "%s and %s not close" % (x, y))
def tst_none_isclose(self, x, y):
msg = "%s and %s shouldn't be close"
assert_(not np.any(np.isclose(x, y)), msg % (x, y))
def tst_isclose_allclose(self, x, y):
msg = "isclose.all() and allclose aren't same for %s and %s"
msg2 = "isclose and allclose aren't same for %s and %s"
if np.isscalar(x) and np.isscalar(y):
assert_(np.isclose(x, y) == np.allclose(x, y), msg=msg2 % (x, y))
else:
assert_array_equal(np.isclose(x, y).all(), np.allclose(x, y), msg % (x, y))
def test_ip_all_isclose(self):
self.setup()
for (x, y) in self.all_close_tests:
self.tst_all_isclose(x, y)
def test_ip_none_isclose(self):
self.setup()
for (x, y) in self.none_close_tests:
self.tst_none_isclose(x, y)
def test_ip_isclose_allclose(self):
self.setup()
tests = (self.all_close_tests + self.none_close_tests +
self.some_close_tests)
for (x, y) in tests:
self.tst_isclose_allclose(x, y)
def test_equal_nan(self):
assert_array_equal(np.isclose(np.nan, np.nan, equal_nan=True), [True])
arr = np.array([1.0, np.nan])
assert_array_equal(np.isclose(arr, arr, equal_nan=True), [True, True])
def test_masked_arrays(self):
# Make sure to test the output type when arguments are interchanged.
x = np.ma.masked_where([True, True, False], np.arange(3))
assert_(type(x) is type(np.isclose(2, x)))
assert_(type(x) is type(np.isclose(x, 2)))
x = np.ma.masked_where([True, True, False], [np.nan, np.inf, np.nan])
assert_(type(x) is type(np.isclose(np.inf, x)))
assert_(type(x) is type(np.isclose(x, np.inf)))
x = np.ma.masked_where([True, True, False], [np.nan, np.nan, np.nan])
y = np.isclose(np.nan, x, equal_nan=True)
assert_(type(x) is type(y))
# Ensure that the mask isn't modified...
assert_array_equal([True, True, False], y.mask)
y = np.isclose(x, np.nan, equal_nan=True)
assert_(type(x) is type(y))
# Ensure that the mask isn't modified...
assert_array_equal([True, True, False], y.mask)
x = np.ma.masked_where([True, True, False], [np.nan, np.nan, np.nan])
y = np.isclose(x, x, equal_nan=True)
assert_(type(x) is type(y))
# Ensure that the mask isn't modified...
assert_array_equal([True, True, False], y.mask)
def test_scalar_return(self):
assert_(np.isscalar(np.isclose(1, 1)))
def test_no_parameter_modification(self):
x = np.array([np.inf, 1])
y = np.array([0, np.inf])
np.isclose(x, y)
assert_array_equal(x, np.array([np.inf, 1]))
assert_array_equal(y, np.array([0, np.inf]))
def test_non_finite_scalar(self):
# GH7014, when two scalars are compared the output should also be a
# scalar
assert_(np.isclose(np.inf, -np.inf) is np.False_)
assert_(np.isclose(0, np.inf) is np.False_)
assert_(type(np.isclose(0, np.inf)) is np.bool_)
class TestStdVar:
def setup(self):
self.A = np.array([1, -1, 1, -1])
self.real_var = 1
def test_basic(self):
assert_almost_equal(np.var(self.A), self.real_var)
assert_almost_equal(np.std(self.A)**2, self.real_var)
def test_scalars(self):
assert_equal(np.var(1), 0)
assert_equal(np.std(1), 0)
def test_ddof1(self):
assert_almost_equal(np.var(self.A, ddof=1),
self.real_var*len(self.A)/float(len(self.A)-1))
assert_almost_equal(np.std(self.A, ddof=1)**2,
self.real_var*len(self.A)/float(len(self.A)-1))
def test_ddof2(self):
assert_almost_equal(np.var(self.A, ddof=2),
self.real_var*len(self.A)/float(len(self.A)-2))
assert_almost_equal(np.std(self.A, ddof=2)**2,
self.real_var*len(self.A)/float(len(self.A)-2))
def test_out_scalar(self):
d = np.arange(10)
out = np.array(0.)
r = np.std(d, out=out)
assert_(r is out)
assert_array_equal(r, out)
r = np.var(d, out=out)
assert_(r is out)
assert_array_equal(r, out)
r = np.mean(d, out=out)
assert_(r is out)
assert_array_equal(r, out)
class TestStdVarComplex:
def test_basic(self):
A = np.array([1, 1.j, -1, -1.j])
real_var = 1
assert_almost_equal(np.var(A), real_var)
assert_almost_equal(np.std(A)**2, real_var)
def test_scalars(self):
assert_equal(np.var(1j), 0)
assert_equal(np.std(1j), 0)
class TestCreationFuncs:
# Test ones, zeros, empty and full.
def setup(self):
dtypes = {np.dtype(tp) for tp in itertools.chain(*np.sctypes.values())}
        # variable-sized dtypes (void, bytes, str) encode itemsize 0 in their .str
variable_sized = {tp for tp in dtypes if tp.str.endswith('0')}
self.dtypes = sorted(dtypes - variable_sized |
{np.dtype(tp.str.replace("0", str(i)))
for tp in variable_sized for i in range(1, 10)},
key=lambda dtype: dtype.str)
self.orders = {'C': 'c_contiguous', 'F': 'f_contiguous'}
self.ndims = 10
def check_function(self, func, fill_value=None):
par = ((0, 1, 2),
range(self.ndims),
self.orders,
self.dtypes)
fill_kwarg = {}
if fill_value is not None:
fill_kwarg = {'fill_value': fill_value}
for size, ndims, order, dtype in itertools.product(*par):
shape = ndims * [size]
# do not fill void type
if fill_kwarg and dtype.str.startswith('|V'):
continue
arr = func(shape, order=order, dtype=dtype,
**fill_kwarg)
assert_equal(arr.dtype, dtype)
assert_(getattr(arr.flags, self.orders[order]))
if fill_value is not None:
if dtype.str.startswith('|S'):
val = str(fill_value)
else:
val = fill_value
assert_equal(arr, dtype.type(val))
def test_zeros(self):
self.check_function(np.zeros)
def test_ones(self):
self.check_function(np.ones)
def test_empty(self):
self.check_function(np.empty)
def test_full(self):
self.check_function(np.full, 0)
self.check_function(np.full, 1)
@pytest.mark.skipif(not HAS_REFCOUNT, reason="Python lacks refcounts")
def test_for_reference_leak(self):
# Make sure we have an object for reference
dim = 1
beg = sys.getrefcount(dim)
np.zeros([dim]*10)
assert_(sys.getrefcount(dim) == beg)
np.ones([dim]*10)
assert_(sys.getrefcount(dim) == beg)
np.empty([dim]*10)
assert_(sys.getrefcount(dim) == beg)
np.full([dim]*10, 0)
assert_(sys.getrefcount(dim) == beg)
class TestLikeFuncs:
'''Test ones_like, zeros_like, empty_like and full_like'''
def setup(self):
self.data = [
# Array scalars
(np.array(3.), None),
(np.array(3), 'f8'),
# 1D arrays
(np.arange(6, dtype='f4'), None),
(np.arange(6), 'c16'),
# 2D C-layout arrays
(np.arange(6).reshape(2, 3), None),
(np.arange(6).reshape(3, 2), 'i1'),
# 2D F-layout arrays
(np.arange(6).reshape((2, 3), order='F'), None),
(np.arange(6).reshape((3, 2), order='F'), 'i1'),
# 3D C-layout arrays
(np.arange(24).reshape(2, 3, 4), None),
(np.arange(24).reshape(4, 3, 2), 'f4'),
# 3D F-layout arrays
(np.arange(24).reshape((2, 3, 4), order='F'), None),
(np.arange(24).reshape((4, 3, 2), order='F'), 'f4'),
# 3D non-C/F-layout arrays
(np.arange(24).reshape(2, 3, 4).swapaxes(0, 1), None),
(np.arange(24).reshape(4, 3, 2).swapaxes(0, 1), '?'),
]
self.shapes = [(), (5,), (5,6,), (5,6,7,)]
def compare_array_value(self, dz, value, fill_value):
if value is not None:
if fill_value:
try:
z = dz.dtype.type(value)
except OverflowError:
pass
else:
assert_(np.all(dz == z))
else:
assert_(np.all(dz == value))
def check_like_function(self, like_function, value, fill_value=False):
if fill_value:
fill_kwarg = {'fill_value': value}
else:
fill_kwarg = {}
for d, dtype in self.data:
# default (K) order, dtype
dz = like_function(d, dtype=dtype, **fill_kwarg)
assert_equal(dz.shape, d.shape)
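            # 'K' order must preserve memory layout: strides scale by the ratio of dtype itemsizes.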
assert_equal(np.array(dz.strides)*d.dtype.itemsize,
np.array(d.strides)*dz.dtype.itemsize)
assert_equal(d.flags.c_contiguous, dz.flags.c_contiguous)
assert_equal(d.flags.f_contiguous, dz.flags.f_contiguous)
if dtype is None:
assert_equal(dz.dtype, d.dtype)
else:
assert_equal(dz.dtype, np.dtype(dtype))
self.compare_array_value(dz, value, fill_value)
# C order, default dtype
dz = like_function(d, order='C', dtype=dtype, **fill_kwarg)
assert_equal(dz.shape, d.shape)
assert_(dz.flags.c_contiguous)
if dtype is None:
assert_equal(dz.dtype, d.dtype)
else:
assert_equal(dz.dtype, np.dtype(dtype))
self.compare_array_value(dz, value, fill_value)
# F order, default dtype
dz = like_function(d, order='F', dtype=dtype, **fill_kwarg)
assert_equal(dz.shape, d.shape)
assert_(dz.flags.f_contiguous)
if dtype is None:
assert_equal(dz.dtype, d.dtype)
else:
assert_equal(dz.dtype, np.dtype(dtype))
self.compare_array_value(dz, value, fill_value)
# A order
dz = like_function(d, order='A', dtype=dtype, **fill_kwarg)
assert_equal(dz.shape, d.shape)
if d.flags.f_contiguous:
assert_(dz.flags.f_contiguous)
else:
assert_(dz.flags.c_contiguous)
if dtype is None:
assert_equal(dz.dtype, d.dtype)
else:
assert_equal(dz.dtype, np.dtype(dtype))
self.compare_array_value(dz, value, fill_value)
# Test the 'shape' parameter
for s in self.shapes:
for o in 'CFA':
sz = like_function(d, dtype=dtype, shape=s, order=o,
**fill_kwarg)
assert_equal(sz.shape, s)
if dtype is None:
assert_equal(sz.dtype, d.dtype)
else:
assert_equal(sz.dtype, np.dtype(dtype))
if o == 'C' or (o == 'A' and d.flags.c_contiguous):
assert_(sz.flags.c_contiguous)
elif o == 'F' or (o == 'A' and d.flags.f_contiguous):
assert_(sz.flags.f_contiguous)
self.compare_array_value(sz, value, fill_value)
if (d.ndim != len(s)):
assert_equal(np.argsort(like_function(d, dtype=dtype,
shape=s, order='K',
**fill_kwarg).strides),
np.argsort(np.empty(s, dtype=dtype,
order='C').strides))
else:
assert_equal(np.argsort(like_function(d, dtype=dtype,
shape=s, order='K',
**fill_kwarg).strides),
np.argsort(d.strides))
# Test the 'subok' parameter
class MyNDArray(np.ndarray):
pass
a = np.array([[1, 2], [3, 4]]).view(MyNDArray)
b = like_function(a, **fill_kwarg)
assert_(type(b) is MyNDArray)
b = like_function(a, subok=False, **fill_kwarg)
assert_(type(b) is not MyNDArray)
def test_ones_like(self):
self.check_like_function(np.ones_like, 1)
def test_zeros_like(self):
self.check_like_function(np.zeros_like, 0)
def test_empty_like(self):
self.check_like_function(np.empty_like, None)
def test_filled_like(self):
self.check_like_function(np.full_like, 0, True)
self.check_like_function(np.full_like, 1, True)
self.check_like_function(np.full_like, 1000, True)
self.check_like_function(np.full_like, 123.456, True)
self.check_like_function(np.full_like, np.inf, True)
class TestCorrelate:
def _setup(self, dt):
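        # Expected 'full'-mode correlations: z1 = corr(x, y), z2 = corr(y, x);
        # the *r variants use a reversed input, zs pairs the strided xs with y.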
self.x = np.array([1, 2, 3, 4, 5], dtype=dt)
self.xs = np.arange(1, 20)[::3]
self.y = np.array([-1, -2, -3], dtype=dt)
self.z1 = np.array([ -3., -8., -14., -20., -26., -14., -5.], dtype=dt)
self.z1_4 = np.array([-2., -5., -8., -11., -14., -5.], dtype=dt)
self.z1r = np.array([-15., -22., -22., -16., -10., -4., -1.], dtype=dt)
self.z2 = np.array([-5., -14., -26., -20., -14., -8., -3.], dtype=dt)
self.z2r = np.array([-1., -4., -10., -16., -22., -22., -15.], dtype=dt)
self.zs = np.array([-3., -14., -30., -48., -66., -84.,
-102., -54., -19.], dtype=dt)
def test_float(self):
self._setup(float)
z = np.correlate(self.x, self.y, 'full')
assert_array_almost_equal(z, self.z1)
z = np.correlate(self.x, self.y[:-1], 'full')
assert_array_almost_equal(z, self.z1_4)
z = np.correlate(self.y, self.x, 'full')
assert_array_almost_equal(z, self.z2)
z = np.correlate(self.x[::-1], self.y, 'full')
assert_array_almost_equal(z, self.z1r)
z = np.correlate(self.y, self.x[::-1], 'full')
assert_array_almost_equal(z, self.z2r)
z = np.correlate(self.xs, self.y, 'full')
assert_array_almost_equal(z, self.zs)
def test_object(self):
self._setup(Decimal)
z = np.correlate(self.x, self.y, 'full')
assert_array_almost_equal(z, self.z1)
z = np.correlate(self.y, self.x, 'full')
assert_array_almost_equal(z, self.z2)
def test_no_overwrite(self):
d = np.ones(100)
k = np.ones(3)
np.correlate(d, k)
assert_array_equal(d, np.ones(100))
assert_array_equal(k, np.ones(3))
def test_complex(self):
x = np.array([1, 2, 3, 4+1j], dtype=complex)
y = np.array([-1, -2j, 3+1j], dtype=complex)
r_z = np.array([3-1j, 6, 8+1j, 11+5j, -5+8j, -4-1j], dtype=complex)
r_z = r_z[::-1].conjugate()
z = np.correlate(y, x, mode='full')
assert_array_almost_equal(z, r_z)
def test_zero_size(self):
with pytest.raises(ValueError):
np.correlate(np.array([]), np.ones(1000), mode='full')
with pytest.raises(ValueError):
np.correlate(np.ones(1000), np.array([]), mode='full')
class TestConvolve:
def test_object(self):
d = [1.] * 100
k = [1.] * 3
assert_array_almost_equal(np.convolve(d, k)[2:-2], np.full(98, 3))
def test_no_overwrite(self):
d = np.ones(100)
k = np.ones(3)
np.convolve(d, k)
assert_array_equal(d, np.ones(100))
assert_array_equal(k, np.ones(3))
class TestArgwhere:
@pytest.mark.parametrize('nd', [0, 1, 2])
def test_nd(self, nd):
# get an nd array with multiple elements in every dimension
x = np.empty((2,)*nd, bool)
# none
x[...] = False
assert_equal(np.argwhere(x).shape, (0, nd))
# only one
x[...] = False
x.flat[0] = True
assert_equal(np.argwhere(x).shape, (1, nd))
# all but one
x[...] = True
x.flat[0] = False
assert_equal(np.argwhere(x).shape, (x.size - 1, nd))
# all
x[...] = True
assert_equal(np.argwhere(x).shape, (x.size, nd))
def test_2D(self):
x = np.arange(6).reshape((2, 3))
assert_array_equal(np.argwhere(x > 1),
[[0, 2],
[1, 0],
[1, 1],
[1, 2]])
def test_list(self):
assert_equal(np.argwhere([4, 0, 2, 1, 3]), [[0], [2], [3], [4]])
class TestStringFunction:
def test_set_string_function(self):
a = np.array([1])
np.set_string_function(lambda x: "FOO", repr=True)
assert_equal(repr(a), "FOO")
np.set_string_function(None, repr=True)
assert_equal(repr(a), "array([1])")
np.set_string_function(lambda x: "FOO", repr=False)
assert_equal(str(a), "FOO")
np.set_string_function(None, repr=False)
assert_equal(str(a), "[1]")
class TestRoll:
def test_roll1d(self):
x = np.arange(10)
xr = np.roll(x, 2)
assert_equal(xr, np.array([8, 9, 0, 1, 2, 3, 4, 5, 6, 7]))
def test_roll2d(self):
x2 = np.reshape(np.arange(10), (2, 5))
x2r = np.roll(x2, 1)
assert_equal(x2r, np.array([[9, 0, 1, 2, 3], [4, 5, 6, 7, 8]]))
x2r = np.roll(x2, 1, axis=0)
assert_equal(x2r, np.array([[5, 6, 7, 8, 9], [0, 1, 2, 3, 4]]))
x2r = np.roll(x2, 1, axis=1)
assert_equal(x2r, np.array([[4, 0, 1, 2, 3], [9, 5, 6, 7, 8]]))
# Roll multiple axes at once.
x2r = np.roll(x2, 1, axis=(0, 1))
assert_equal(x2r, np.array([[9, 5, 6, 7, 8], [4, 0, 1, 2, 3]]))
x2r = np.roll(x2, (1, 0), axis=(0, 1))
assert_equal(x2r, np.array([[5, 6, 7, 8, 9], [0, 1, 2, 3, 4]]))
x2r = np.roll(x2, (-1, 0), axis=(0, 1))
assert_equal(x2r, np.array([[5, 6, 7, 8, 9], [0, 1, 2, 3, 4]]))
x2r = np.roll(x2, (0, 1), axis=(0, 1))
assert_equal(x2r, np.array([[4, 0, 1, 2, 3], [9, 5, 6, 7, 8]]))
x2r = np.roll(x2, (0, -1), axis=(0, 1))
assert_equal(x2r, np.array([[1, 2, 3, 4, 0], [6, 7, 8, 9, 5]]))
x2r = np.roll(x2, (1, 1), axis=(0, 1))
assert_equal(x2r, np.array([[9, 5, 6, 7, 8], [4, 0, 1, 2, 3]]))
x2r = np.roll(x2, (-1, -1), axis=(0, 1))
assert_equal(x2r, np.array([[6, 7, 8, 9, 5], [1, 2, 3, 4, 0]]))
# Roll the same axis multiple times.
x2r = np.roll(x2, 1, axis=(0, 0))
assert_equal(x2r, np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]))
x2r = np.roll(x2, 1, axis=(1, 1))
assert_equal(x2r, np.array([[3, 4, 0, 1, 2], [8, 9, 5, 6, 7]]))
# Roll more than one turn in either direction.
x2r = np.roll(x2, 6, axis=1)
assert_equal(x2r, np.array([[4, 0, 1, 2, 3], [9, 5, 6, 7, 8]]))
x2r = np.roll(x2, -4, axis=1)
assert_equal(x2r, np.array([[4, 0, 1, 2, 3], [9, 5, 6, 7, 8]]))
def test_roll_empty(self):
x = np.array([])
assert_equal(np.roll(x, 1), np.array([]))
class TestRollaxis:
# expected shape indexed by (axis, start) for array of
# shape (1, 2, 3, 4)
tgtshape = {(0, 0): (1, 2, 3, 4), (0, 1): (1, 2, 3, 4),
(0, 2): (2, 1, 3, 4), (0, 3): (2, 3, 1, 4),
(0, 4): (2, 3, 4, 1),
(1, 0): (2, 1, 3, 4), (1, 1): (1, 2, 3, 4),
(1, 2): (1, 2, 3, 4), (1, 3): (1, 3, 2, 4),
(1, 4): (1, 3, 4, 2),
(2, 0): (3, 1, 2, 4), (2, 1): (1, 3, 2, 4),
(2, 2): (1, 2, 3, 4), (2, 3): (1, 2, 3, 4),
(2, 4): (1, 2, 4, 3),
(3, 0): (4, 1, 2, 3), (3, 1): (1, 4, 2, 3),
(3, 2): (1, 2, 4, 3), (3, 3): (1, 2, 3, 4),
(3, 4): (1, 2, 3, 4)}
def test_exceptions(self):
a = np.arange(1*2*3*4).reshape(1, 2, 3, 4)
assert_raises(np.AxisError, np.rollaxis, a, -5, 0)
assert_raises(np.AxisError, np.rollaxis, a, 0, -5)
assert_raises(np.AxisError, np.rollaxis, a, 4, 0)
assert_raises(np.AxisError, np.rollaxis, a, 0, 5)
def test_results(self):
a = np.arange(1*2*3*4).reshape(1, 2, 3, 4).copy()
aind = np.indices(a.shape)
assert_(a.flags['OWNDATA'])
for (i, j) in self.tgtshape:
# positive axis, positive start
res = np.rollaxis(a, axis=i, start=j)
i0, i1, i2, i3 = aind[np.array(res.shape) - 1]
assert_(np.all(res[i0, i1, i2, i3] == a))
assert_(res.shape == self.tgtshape[(i, j)], str((i,j)))
assert_(not res.flags['OWNDATA'])
# negative axis, positive start
ip = i + 1
res = np.rollaxis(a, axis=-ip, start=j)
i0, i1, i2, i3 = aind[np.array(res.shape) - 1]
assert_(np.all(res[i0, i1, i2, i3] == a))
assert_(res.shape == self.tgtshape[(4 - ip, j)])
assert_(not res.flags['OWNDATA'])
# positive axis, negative start
jp = j + 1 if j < 4 else j
res = np.rollaxis(a, axis=i, start=-jp)
i0, i1, i2, i3 = aind[np.array(res.shape) - 1]
assert_(np.all(res[i0, i1, i2, i3] == a))
assert_(res.shape == self.tgtshape[(i, 4 - jp)])
assert_(not res.flags['OWNDATA'])
# negative axis, negative start
ip = i + 1
jp = j + 1 if j < 4 else j
res = np.rollaxis(a, axis=-ip, start=-jp)
i0, i1, i2, i3 = aind[np.array(res.shape) - 1]
assert_(np.all(res[i0, i1, i2, i3] == a))
assert_(res.shape == self.tgtshape[(4 - ip, 4 - jp)])
assert_(not res.flags['OWNDATA'])
class TestMoveaxis:
def test_move_to_end(self):
x = np.random.randn(5, 6, 7)
for source, expected in [(0, (6, 7, 5)),
(1, (5, 7, 6)),
(2, (5, 6, 7)),
(-1, (5, 6, 7))]:
actual = np.moveaxis(x, source, -1).shape
            assert_equal(actual, expected)
def test_move_new_position(self):
x = np.random.randn(1, 2, 3, 4)
for source, destination, expected in [
(0, 1, (2, 1, 3, 4)),
(1, 2, (1, 3, 2, 4)),
(1, -1, (1, 3, 4, 2)),
]:
actual = np.moveaxis(x, source, destination).shape
            assert_equal(actual, expected)
def test_preserve_order(self):
x = np.zeros((1, 2, 3, 4))
for source, destination in [
(0, 0),
(3, -1),
(-1, 3),
([0, -1], [0, -1]),
([2, 0], [2, 0]),
(range(4), range(4)),
]:
actual = np.moveaxis(x, source, destination).shape
            assert_equal(actual, (1, 2, 3, 4))
def test_move_multiples(self):
x = np.zeros((0, 1, 2, 3))
for source, destination, expected in [
([0, 1], [2, 3], (2, 3, 0, 1)),
([2, 3], [0, 1], (2, 3, 0, 1)),
([0, 1, 2], [2, 3, 0], (2, 3, 0, 1)),
([3, 0], [1, 0], (0, 3, 1, 2)),
([0, 3], [0, 1], (0, 3, 1, 2)),
]:
actual = np.moveaxis(x, source, destination).shape
            assert_equal(actual, expected)
def test_errors(self):
x = np.random.randn(1, 2, 3)
assert_raises_regex(np.AxisError, 'source.*out of bounds',
np.moveaxis, x, 3, 0)
assert_raises_regex(np.AxisError, 'source.*out of bounds',
np.moveaxis, x, -4, 0)
assert_raises_regex(np.AxisError, 'destination.*out of bounds',
np.moveaxis, x, 0, 5)
assert_raises_regex(ValueError, 'repeated axis in `source`',
np.moveaxis, x, [0, 0], [0, 1])
assert_raises_regex(ValueError, 'repeated axis in `destination`',
np.moveaxis, x, [0, 1], [1, 1])
assert_raises_regex(ValueError, 'must have the same number',
np.moveaxis, x, 0, [0, 1])
assert_raises_regex(ValueError, 'must have the same number',
np.moveaxis, x, [0, 1], [0])
def test_array_likes(self):
x = np.ma.zeros((1, 2, 3))
result = np.moveaxis(x, 0, 0)
        assert_equal(x.shape, result.shape)
assert_(isinstance(result, np.ma.MaskedArray))
x = [1, 2, 3]
result = np.moveaxis(x, 0, 0)
        assert_equal(x, list(result))
assert_(isinstance(result, np.ndarray))
class TestCross:
def test_2x2(self):
u = [1, 2]
v = [3, 4]
z = -2
cp = np.cross(u, v)
assert_equal(cp, z)
cp = np.cross(v, u)
assert_equal(cp, -z)
def test_2x3(self):
u = [1, 2]
v = [3, 4, 5]
z = np.array([10, -5, -2])
cp = np.cross(u, v)
assert_equal(cp, z)
cp = np.cross(v, u)
assert_equal(cp, -z)
def test_3x3(self):
u = [1, 2, 3]
v = [4, 5, 6]
z = np.array([-3, 6, -3])
cp = np.cross(u, v)
assert_equal(cp, z)
cp = np.cross(v, u)
assert_equal(cp, -z)
def test_broadcasting(self):
# Ticket #2624 (Trac #2032)
u = np.tile([1, 2], (11, 1))
v = np.tile([3, 4], (11, 1))
z = -2
assert_equal(np.cross(u, v), z)
assert_equal(np.cross(v, u), -z)
assert_equal(np.cross(u, u), 0)
u = np.tile([1, 2], (11, 1)).T
v = np.tile([3, 4, 5], (11, 1))
z = np.tile([10, -5, -2], (11, 1))
assert_equal(np.cross(u, v, axisa=0), z)
assert_equal(np.cross(v, u.T), -z)
assert_equal(np.cross(v, v), 0)
u = np.tile([1, 2, 3], (11, 1)).T
v = np.tile([3, 4], (11, 1)).T
z = np.tile([-12, 9, -2], (11, 1))
assert_equal(np.cross(u, v, axisa=0, axisb=0), z)
assert_equal(np.cross(v.T, u.T), -z)
assert_equal(np.cross(u.T, u.T), 0)
u = np.tile([1, 2, 3], (5, 1))
v = np.tile([4, 5, 6], (5, 1)).T
z = np.tile([-3, 6, -3], (5, 1))
assert_equal(np.cross(u, v, axisb=0), z)
assert_equal(np.cross(v.T, u), -z)
assert_equal(np.cross(u, u), 0)
def test_broadcasting_shapes(self):
u = np.ones((2, 1, 3))
v = np.ones((5, 3))
assert_equal(np.cross(u, v).shape, (2, 5, 3))
u = np.ones((10, 3, 5))
v = np.ones((2, 5))
assert_equal(np.cross(u, v, axisa=1, axisb=0).shape, (10, 5, 3))
assert_raises(np.AxisError, np.cross, u, v, axisa=1, axisb=2)
assert_raises(np.AxisError, np.cross, u, v, axisa=3, axisb=0)
u = np.ones((10, 3, 5, 7))
v = np.ones((5, 7, 2))
assert_equal(np.cross(u, v, axisa=1, axisc=2).shape, (10, 5, 3, 7))
assert_raises(np.AxisError, np.cross, u, v, axisa=-5, axisb=2)
assert_raises(np.AxisError, np.cross, u, v, axisa=1, axisb=-4)
# gh-5885
u = np.ones((3, 4, 2))
for axisc in range(-2, 2):
assert_equal(np.cross(u, u, axisc=axisc).shape, (3, 4))
def test_outer_out_param():
arr1 = np.ones((5,))
arr2 = np.ones((2,))
arr3 = np.linspace(-2, 2, 5)
out1 = np.ndarray(shape=(5,5))
out2 = np.ndarray(shape=(2, 5))
res1 = np.outer(arr1, arr3, out1)
assert_equal(res1, out1)
assert_equal(np.outer(arr2, arr3, out2), out2)
class TestIndices:
def test_simple(self):
[x, y] = np.indices((4, 3))
assert_array_equal(x, np.array([[0, 0, 0],
[1, 1, 1],
[2, 2, 2],
[3, 3, 3]]))
assert_array_equal(y, np.array([[0, 1, 2],
[0, 1, 2],
[0, 1, 2],
[0, 1, 2]]))
def test_single_input(self):
[x] = np.indices((4,))
assert_array_equal(x, np.array([0, 1, 2, 3]))
[x] = np.indices((4,), sparse=True)
assert_array_equal(x, np.array([0, 1, 2, 3]))
def test_scalar_input(self):
assert_array_equal([], np.indices(()))
assert_array_equal([], np.indices((), sparse=True))
assert_array_equal([[]], np.indices((0,)))
assert_array_equal([[]], np.indices((0,), sparse=True))
def test_sparse(self):
[x, y] = np.indices((4,3), sparse=True)
assert_array_equal(x, np.array([[0], [1], [2], [3]]))
assert_array_equal(y, np.array([[0, 1, 2]]))
@pytest.mark.parametrize("dtype", [np.int32, np.int64, np.float32, np.float64])
@pytest.mark.parametrize("dims", [(), (0,), (4, 3)])
def test_return_type(self, dtype, dims):
inds = np.indices(dims, dtype=dtype)
assert_(inds.dtype == dtype)
for arr in np.indices(dims, dtype=dtype, sparse=True):
assert_(arr.dtype == dtype)
class TestRequire:
flag_names = ['C', 'C_CONTIGUOUS', 'CONTIGUOUS',
'F', 'F_CONTIGUOUS', 'FORTRAN',
'A', 'ALIGNED',
'W', 'WRITEABLE',
'O', 'OWNDATA']
def generate_all_false(self, dtype):
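        # A field view at offset 1 into a read-only structured array is
        # non-contiguous, unaligned, and does not own its data, so all five
        # flags checked below are False.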
arr = np.zeros((2, 2), [('junk', 'i1'), ('a', dtype)])
arr.setflags(write=False)
a = arr['a']
assert_(not a.flags['C'])
assert_(not a.flags['F'])
assert_(not a.flags['O'])
assert_(not a.flags['W'])
assert_(not a.flags['A'])
return a
def set_and_check_flag(self, flag, dtype, arr):
if dtype is None:
dtype = arr.dtype
b = np.require(arr, dtype, [flag])
assert_(b.flags[flag])
assert_(b.dtype == dtype)
# a further call to np.require ought to return the same array
# unless OWNDATA is specified.
c = np.require(b, None, [flag])
if flag[0] != 'O':
assert_(c is b)
else:
assert_(c.flags[flag])
def test_require_each(self):
id = ['f8', 'i4']
fd = [None, 'f8', 'c16']
for idtype, fdtype, flag in itertools.product(id, fd, self.flag_names):
a = self.generate_all_false(idtype)
self.set_and_check_flag(flag, fdtype, a)
def test_unknown_requirement(self):
a = self.generate_all_false('f8')
assert_raises(KeyError, np.require, a, None, 'Q')
def test_non_array_input(self):
a = np.require([1, 2, 3, 4], 'i4', ['C', 'A', 'O'])
assert_(a.flags['O'])
assert_(a.flags['C'])
assert_(a.flags['A'])
assert_(a.dtype == 'i4')
assert_equal(a, [1, 2, 3, 4])
def test_C_and_F_simul(self):
a = self.generate_all_false('f8')
assert_raises(ValueError, np.require, a, None, ['C', 'F'])
def test_ensure_array(self):
class ArraySubclass(np.ndarray):
pass
a = ArraySubclass((2, 2))
b = np.require(a, None, ['E'])
assert_(type(b) is np.ndarray)
def test_preserve_subtype(self):
class ArraySubclass(np.ndarray):
pass
for flag in self.flag_names:
a = ArraySubclass((2, 2))
self.set_and_check_flag(flag, None, a)
class TestBroadcast:
def test_broadcast_in_args(self):
# gh-5881
arrs = [np.empty((6, 7)), np.empty((5, 6, 1)), np.empty((7,)),
np.empty((5, 1, 7))]
mits = [np.broadcast(*arrs),
np.broadcast(np.broadcast(*arrs[:0]), np.broadcast(*arrs[0:])),
np.broadcast(np.broadcast(*arrs[:1]), np.broadcast(*arrs[1:])),
np.broadcast(np.broadcast(*arrs[:2]), np.broadcast(*arrs[2:])),
np.broadcast(arrs[0], np.broadcast(*arrs[1:-1]), arrs[-1])]
for mit in mits:
assert_equal(mit.shape, (5, 6, 7))
assert_equal(mit.ndim, 3)
assert_equal(mit.nd, 3)
assert_equal(mit.numiter, 4)
for a, ia in zip(arrs, mit.iters):
assert_(a is ia.base)
def test_broadcast_single_arg(self):
# gh-6899
arrs = [np.empty((5, 6, 7))]
mit = np.broadcast(*arrs)
assert_equal(mit.shape, (5, 6, 7))
assert_equal(mit.ndim, 3)
assert_equal(mit.nd, 3)
assert_equal(mit.numiter, 1)
assert_(arrs[0] is mit.iters[0].base)
def test_number_of_arguments(self):
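        # np.broadcast historically accepts at most 32 arrays (NPY_MAXARGS);
        # anything beyond that raises ValueError.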
arr = np.empty((5,))
for j in range(35):
arrs = [arr] * j
if j > 32:
assert_raises(ValueError, np.broadcast, *arrs)
else:
mit = np.broadcast(*arrs)
assert_equal(mit.numiter, j)
def test_broadcast_error_kwargs(self):
#gh-13455
arrs = [np.empty((5, 6, 7))]
mit = np.broadcast(*arrs)
mit2 = np.broadcast(*arrs, **{})
assert_equal(mit.shape, mit2.shape)
assert_equal(mit.ndim, mit2.ndim)
assert_equal(mit.nd, mit2.nd)
assert_equal(mit.numiter, mit2.numiter)
assert_(mit.iters[0].base is mit2.iters[0].base)
assert_raises(ValueError, np.broadcast, 1, **{'x': 1})
class TestKeepdims:
class sub_array(np.ndarray):
def sum(self, axis=None, dtype=None, out=None):
return np.ndarray.sum(self, axis, dtype, out, keepdims=True)
def test_raise(self):
sub_class = self.sub_array
x = np.arange(30).view(sub_class)
assert_raises(TypeError, np.sum, x, keepdims=True)
class TestTensordot:
def test_zero_dimension(self):
# Test resolution to issue #5663
a = np.ndarray((3,0))
b = np.ndarray((0,4))
td = np.tensordot(a, b, (1, 0))
assert_array_equal(td, np.dot(a, b))
assert_array_equal(td, np.einsum('ij,jk', a, b))
def test_zero_dimensional(self):
# gh-12130
arr_0d = np.array(1)
ret = np.tensordot(arr_0d, arr_0d, ([], [])) # contracting no axes is well defined
assert_array_equal(ret, arr_0d)
| 38.205661
| 95
| 0.539624
|
400910476a451a3eb79a2e75b29b7e4856995d5e
| 3,029
|
py
|
Python
|
api/tacticalrmm/checks/migrations/0027_auto_20220401_2248.py
|
v2cloud/tacticalrmm
|
12f599f9749985f66ff9b559c5e5abd36064b182
|
[
"MIT"
] | null | null | null |
api/tacticalrmm/checks/migrations/0027_auto_20220401_2248.py
|
v2cloud/tacticalrmm
|
12f599f9749985f66ff9b559c5e5abd36064b182
|
[
"MIT"
] | null | null | null |
api/tacticalrmm/checks/migrations/0027_auto_20220401_2248.py
|
v2cloud/tacticalrmm
|
12f599f9749985f66ff9b559c5e5abd36064b182
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.12 on 2022-04-01 22:48
from django.db import migrations, transaction
from django.db.utils import IntegrityError
from tacticalrmm.constants import CheckType
def migrate_check_results(apps, schema_editor):
Check = apps.get_model("checks", "Check")
CheckResult = apps.get_model("checks", "CheckResult")
for check in Check.objects.exclude(agent=None).iterator():
try:
with transaction.atomic():
                assigned_check_id = (
                    check.parent_check if check.managed_by_policy else check.id
                )
                CheckResult.objects.create(
                    assigned_check_id=assigned_check_id,
                    agent_id=check.agent.id,
                    status=check.status,
                    more_info=check.more_info,
                    last_run=check.last_run,
                    fail_count=check.fail_count,
                    outage_history=check.outage_history,
                    extra_details=check.extra_details,
                    stdout=check.stdout,
                    stderr=check.stderr,
                    retcode=check.retcode,
                    execution_time=check.execution_time,
                    history=check.history,
                    alert_severity=check.alert_severity
                    if check.check_type
                    in [
                        CheckType.MEMORY,
                        CheckType.CPU_LOAD,
                        CheckType.DISK_SPACE,
                        CheckType.SCRIPT,
                    ]
                    else None,
                )
except IntegrityError:
pass
class Migration(migrations.Migration):
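    # Run without an outer transaction so each per-check atomic() block above
    # can commit or roll back independently.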
atomic = False
dependencies = [
("checks", "0026_auto_20220401_2244"),
]
operations = [
migrations.RunPython(migrate_check_results),
]
| 37.395062
| 62
| 0.470782
|
f5a21ff61b0c9a0578d0621d66e2cf6f0606b510
| 658
|
py
|
Python
|
test/v0_1/test_base.py
|
mgorny/logfury
|
14ffc185d084084b068136a8c14354c38aa8ec8c
|
[
"BSD-3-Clause"
] | null | null | null |
test/v0_1/test_base.py
|
mgorny/logfury
|
14ffc185d084084b068136a8c14354c38aa8ec8c
|
[
"BSD-3-Clause"
] | null | null | null |
test/v0_1/test_base.py
|
mgorny/logfury
|
14ffc185d084084b068136a8c14354c38aa8ec8c
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import print_function
from contextlib import contextmanager
import logging
import platform
import six
import unittest
class TestBase(unittest.TestCase):
def setUp(self):
self.logger = logging.getLogger(__name__)
self.base_logger_name = __name__
def _get_prefix(self):
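        # CPython 3 qualifies logged names with the class (presumably via
        # __qualname__); PY2 and PyPy log bare names.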
if six.PY2 or platform.python_implementation() == 'PyPy':
return ''
else:
return self.__class__.__name__ + '.'
@contextmanager
def assertRaises(self, exc):
try:
yield
except exc:
pass
else:
assert False, 'should have thrown %s' % (exc,)
| 22.689655
| 65
| 0.62462
|
dc5aa82d952e2133942126aaf6d6f4bad33c849b
| 9,905
|
py
|
Python
|
SMI_LSL/DataStreaming.py
|
ForSubmission/MailPhys
|
d11dc04802d69218ca49ef3b3452656197faaabc
|
[
"MIT"
] | 1
|
2018-06-08T10:26:54.000Z
|
2018-06-08T10:26:54.000Z
|
SMI_LSL/DataStreaming.py
|
ForSubmission/MailPhys
|
d11dc04802d69218ca49ef3b3452656197faaabc
|
[
"MIT"
] | null | null | null |
SMI_LSL/DataStreaming.py
|
ForSubmission/MailPhys
|
d11dc04802d69218ca49ef3b3452656197faaabc
|
[
"MIT"
] | 2
|
2018-06-11T18:13:57.000Z
|
2019-05-12T07:26:31.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------
#
# (c) Copyright 1997-2013, SensoMotoric Instruments GmbH, Aalto University
#
# Permission is hereby granted, free of charge, to any person or
# organization obtaining a copy of the software and accompanying
# documentation covered by this license (the "Software") to use,
# reproduce, display, distribute, execute, and transmit the Software,
# and to prepare derivative works of the Software, and to permit
# third-parties to whom the Software is furnished to do so, all subject
# to the following:
#
# The copyright notices in the Software and this entire statement,
# including the above license grant, this restriction and the following
# disclaimer, must be included in all copies of the Software, in whole
# or in part, and all derivative works of the Software, unless such
# copies or derivative works are solely in the form of
# machine-executable object code generated by a source language
# processor.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND
# NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR ANYONE
# DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR OTHER
# LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
# OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# -----------------------------------------------------------------------
# REQUIRES PYTHON 2
# This script fetches data from iViewX running on the same machine.
# All packets received from iViewX are passed on to Lab Streaming Layer.
from iViewXAPI import * #iViewX library
from iViewXAPIReturnCodes import *
import time
import pylsl as lsl
def marcoTime():
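    # Milliseconds since a fixed project epoch; the constant appears to be an
    # arbitrary offset chosen when the project started (late 2015).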
return int(round(time.time() * 1000) - 1446909066675)
# ---------------------------------------------
# ---- connect to iViewX
# ---------------------------------------------
res = iViewXAPI.iV_Connect(c_char_p('127.0.0.1'), c_int(4444), c_char_p('127.0.0.1'), c_int(5555))
if res != 1:
HandleError(res)
exit(0)
res = iViewXAPI.iV_SetLogger(c_int(1), c_char_p("iViewXSDK_Python_lsl.txt"))
res = iViewXAPI.iV_GetSystemInfo(byref(systemData))
print "iV_GetSystemInfo: " + str(res)
samplingRate = round(systemData.samplerate)
print "Samplerate: " + str(samplingRate)
print "iViewX Version: " + str(systemData.iV_MajorVersion) + "." + str(systemData.iV_MinorVersion) + "." + str(systemData.iV_Buildnumber)
print "iViewX API Version: " + str(systemData.API_MajorVersion) + "." + str(systemData.API_MinorVersion) + "." + str(systemData.API_Buildnumber)
# ---------------------------------------------
# ---- constants / support
# ---------------------------------------------
# left eye mapped to -1, right to 1, unknown to 0
eyeDict = {'l': -1, 'L': -1, 'LEFT': -1, 'left': -1, 'Left': -1, 'r': 1, 'R': 1, 'RIGHT': 1, 'right': 1, 'Right': 1}
k_EyeUnknown = 0  # eye code when unknown
# -- lsl constants --
k_nchans_raw = 13 # raw stream channels
k_nchans_event = 7 # event stream channels
k_chunkSize = 32 # size of chunks (using example given by lsl)
k_maxBuff = 30 # maximum buffer size in seconds
# ---------------------------------------------
# ---- lab streaming layer
# ---------------------------------------------
rawStream_info = lsl.StreamInfo('SMI_Raw', 'Gaze', k_nchans_raw, samplingRate, 'float32', 'smiraw500xa15')
eventStream_info = lsl.StreamInfo('SMI_Event', 'Event', k_nchans_event, samplingRate, 'float32', 'smievent500ds15')
# append meta-data
rawStream_info.desc().append_child_value("manufacturer", "SMI")
eventStream_info.desc().append_child_value("manufacturer", "SMI")
rawStream_info.desc().append_child_value("model", "RED")
eventStream_info.desc().append_child_value("model", "RED")
rawStream_info.desc().append_child_value("api", "iViewPythonLSL")
eventStream_info.desc().append_child_value("api", "iViewPythonLSL")
# -- RAW (GAZE) CHANNELS --
rawChannels = rawStream_info.desc().append_child("channels")
# Make sure order matches order in midas' node
for c in ["timestamp"]:
rawChannels.append_child("channel")\
.append_child_value("label", c)\
.append_child_value("unit", "microseconds")\
.append_child_value("type", "Gaze")
for c in ["leftGazeX", "leftGazeY"]:
rawChannels.append_child("channel")\
.append_child_value("label", c)\
.append_child_value("unit", "pixels")\
.append_child_value("type", "Gaze")
for c in ["leftDiam", "leftEyePositionX", "leftEyePositionY", "leftEyePositionZ", "rightGazeX", "rightGazeY", "rightDiam", "rightEyePositionX", "rightEyePositionY", "rightEyePositionZ"]:
rawChannels.append_child("channel")\
.append_child_value("label", c)\
.append_child_value("unit", "millimetres")\
.append_child_value("type", "Gaze")
# -- EVENT CHANNELS --
eventChannels = eventStream_info.desc().append_child("channels")
# Make sure order matches order in midas' node
for c in ["eye"]:
eventChannels.append_child("channel")\
.append_child_value("label", c)\
.append_child_value("unit", "index")\
.append_child_value("type", "Event")
for c in ["startTime", "endTime", "duration"]:
eventChannels.append_child("channel")\
.append_child_value("label", c)\
.append_child_value("unit", "microseconds")\
.append_child_value("type", "Event")
for c in ["positionX", "positionY"]:
eventChannels.append_child("channel")\
.append_child_value("label", c)\
.append_child_value("unit", "pixels")\
.append_child_value("type", "Event")
for c in ["marcotime"]:
eventChannels.append_child("channel")\
.append_child_value("label", c)\
.append_child_value("unit", "milliseconds")\
.append_child_value("type", "Event")
# ---------------------------------------------
# ---- lsl outlets
# ---------------------------------------------
rawOutlet = lsl.StreamOutlet(rawStream_info, k_chunkSize, k_maxBuff)
eventOutlet = lsl.StreamOutlet(eventStream_info, k_chunkSize, k_maxBuff)
# ---------------------------------------------
# ---- configure and start calibration
# ---------------------------------------------
minAccuracy = 1.0
accLX = 1000
accLY = 1000
accRX = 1000
accRY = 1000
inkey = "x"
while (accLX > minAccuracy or accLY > minAccuracy or accRX > minAccuracy or accRY > minAccuracy) and not 's' in inkey:
displayDevice = 1
if 'm' in inkey:
autoControl = 0
else:
autoControl = 1
calibrationData = CCalibration(9, 1, displayDevice, 0, autoControl, 250, 220, 2, 20, b"")
res = iViewXAPI.iV_SetupCalibration(byref(calibrationData))
print "iV_SetupCalibration " + str(res)
res = iViewXAPI.iV_Calibrate()
print "iV_Calibrate " + str(res)
res = iViewXAPI.iV_Validate()
print "iV_Validate " + str(res)
res = iViewXAPI.iV_GetAccuracy(byref(accuracyData), 0)
print "iV_GetAccuracy " + str(res)
print "deviationXLeft " + str(accuracyData.deviationLX) + " deviationYLeft " + str(accuracyData.deviationLY)
print "deviationXRight " + str(accuracyData.deviationRX) + " deviationYRight " + str(accuracyData.deviationRY)
accLX = accuracyData.deviationLX
accLY = accuracyData.deviationLY
accRX = accuracyData.deviationRX
accRY = accuracyData.deviationRY
if accLX > minAccuracy or accLY > minAccuracy or accRX > minAccuracy or accRY > minAccuracy:
print("One or more accuracies were above " + str(minAccuracy))
inkey = raw_input("Just press enter to repeat auto calibration, 'm' (+ Enter) to repeat calibration under manual control or 's' (+ Enter) to skip further calibration >")
# ---------------------------------------------
# ---- define the callback functions. Also see the enum and string arrays in PeyeConstants for input/output formats.
# ---------------------------------------------
def SampleCallback(sample):
data = [None] * k_nchans_raw
data[0] = sample.timestamp
data[1] = sample.leftEye.gazeX
data[2] = sample.leftEye.gazeY
data[3] = sample.leftEye.diam
data[4] = sample.leftEye.eyePositionX
data[5] = sample.leftEye.eyePositionY
data[6] = sample.leftEye.eyePositionZ
data[7] = sample.rightEye.gazeX
data[8] = sample.rightEye.gazeY
data[9] = sample.rightEye.diam
data[10] = sample.rightEye.eyePositionX
data[11] = sample.rightEye.eyePositionY
data[12] = sample.rightEye.eyePositionZ
rawOutlet.push_sample(data)
return 0
def EventCallback(event):
data = [None] * k_nchans_event
data[0] = eyeDict[event.eye]
data[1] = event.startTime
data[2] = event.endTime
data[3] = event.duration
data[4] = event.positionX
data[5] = event.positionY
data[6] = marcoTime()
eventOutlet.push_sample(data)
return 0
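# Keep module-level references to the ctypes callback wrappers; if they were
# garbage collected while still registered, the callbacks would crash.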
CMPFUNC = WINFUNCTYPE(c_int, CSample)
smp_func = CMPFUNC(SampleCallback)
sampleCB = False
CMPFUNC = WINFUNCTYPE(c_int, CEvent)
event_func = CMPFUNC(EventCallback)
eventCB = False
# ---------------------------------------------
# ---- start DataStreaming, loops until q is entered
# ---------------------------------------------
res = iViewXAPI.iV_SetSampleCallback(smp_func)
sampleCB = True
res = iViewXAPI.iV_SetEventCallback(event_func)
eventCB = True
command = ''
while not command == 'q':
print('')
print('STREAMING STARTED')
print('')
command = raw_input('q+enter to stop streaming eye data. ')
print('Terminating... ')
sampleCB = False
eventCB = False
# ---------------------------------------------
# ---- stop recording and disconnect from iViewX
# ---------------------------------------------
res = iViewXAPI.iV_Disconnect()
| 37.236842
| 186
| 0.640485
|
dc571f87a1b938e912d0564ecaedb74ad407fd8c
| 9,430
|
py
|
Python
|
sympy/assumptions/sathandlers.py
|
ianmasc/sympy
|
f089bdc70cfa1e2aa6ecfdb6d568f37bd937bd5e
|
[
"BSD-3-Clause"
] | null | null | null |
sympy/assumptions/sathandlers.py
|
ianmasc/sympy
|
f089bdc70cfa1e2aa6ecfdb6d568f37bd937bd5e
|
[
"BSD-3-Clause"
] | null | null | null |
sympy/assumptions/sathandlers.py
|
ianmasc/sympy
|
f089bdc70cfa1e2aa6ecfdb6d568f37bd937bd5e
|
[
"BSD-3-Clause"
] | null | null | null |
from collections import defaultdict
from sympy.assumptions.ask import Q
from sympy.core import (Add, Mul, Pow, Number, NumberSymbol, Symbol)
from sympy.core.numbers import ImaginaryUnit
from sympy.functions.elementary.complexes import Abs
from sympy.logic.boolalg import (Equivalent, And, Or, Implies)
from sympy.matrices.expressions import MatMul
# APIs here may be subject to change
### Helper functions ###
def allarg(symbol, fact, expr):
"""
    Apply the fact to all arguments of the expression.
Parameters
==========
symbol : Symbol
A placeholder symbol.
fact : Boolean
Resulting ``Boolean`` expression.
expr : Expr
Examples
========
>>> from sympy import Q
>>> from sympy.assumptions.sathandlers import allarg
>>> from sympy.abc import x, y
>>> allarg(x, Q.negative(x) | Q.positive(x), x*y)
(Q.negative(x) | Q.positive(x)) & (Q.negative(y) | Q.positive(y))
"""
return And(*[fact.subs(symbol, arg) for arg in expr.args])
def anyarg(symbol, fact, expr):
"""
    Apply the fact to any argument of the expression.
Parameters
==========
symbol : Symbol
A placeholder symbol.
fact : Boolean
Resulting ``Boolean`` expression.
expr : Expr
Examples
========
>>> from sympy import Q
>>> from sympy.assumptions.sathandlers import anyarg
>>> from sympy.abc import x, y
>>> anyarg(x, Q.negative(x) & Q.positive(x), x*y)
(Q.negative(x) & Q.positive(x)) | (Q.negative(y) & Q.positive(y))
"""
return Or(*[fact.subs(symbol, arg) for arg in expr.args])
def exactlyonearg(symbol, fact, expr):
"""
    Apply the fact to exactly one argument of the expression.
Parameters
==========
symbol : Symbol
A placeholder symbol.
fact : Boolean
Resulting ``Boolean`` expression.
expr : Expr
Examples
========
>>> from sympy import Q
>>> from sympy.assumptions.sathandlers import exactlyonearg
>>> from sympy.abc import x, y
>>> exactlyonearg(x, Q.positive(x), x*y)
(Q.positive(x) & ~Q.positive(y)) | (Q.positive(y) & ~Q.positive(x))
"""
pred_args = [fact.subs(symbol, arg) for arg in expr.args]
res = Or(*[And(pred_args[i], *[~lit for lit in pred_args[:i] +
pred_args[i+1:]]) for i in range(len(pred_args))])
return res
### Fact registry ###
class ClassFactRegistry:
"""
Register handlers against classes.
Explanation
===========
``register`` method registers the handler function for a class. Here,
handler function should return a single fact. ``multiregister`` method
registers the handler function for multiple classes. Here, handler function
should return a container of multiple facts.
``registry(expr)`` returns a set of facts for *expr*.
Examples
========
Here, we register the facts for ``Abs``.
>>> from sympy import Abs, Q
>>> from sympy.logic.boolalg import Equivalent
>>> from sympy.assumptions.sathandlers import ClassFactRegistry
>>> reg = ClassFactRegistry()
>>> @reg.register(Abs)
... def f1(expr):
... return Q.nonnegative(expr)
>>> @reg.register(Abs)
... def f2(expr):
... arg = expr.args[0]
... return Equivalent(~Q.zero(arg), ~Q.zero(expr))
Calling the registry with expression returns the defined facts for the
expression.
>>> from sympy.abc import x
>>> reg(Abs(x))
{Q.nonnegative(Abs(x)), Equivalent(~Q.zero(x), ~Q.zero(Abs(x)))}
Multiple facts can be registered at once by ``multiregister`` method.
>>> reg2 = ClassFactRegistry()
>>> @reg2.multiregister(Abs)
... def _(expr):
... arg = expr.args[0]
... return [Q.even(arg) >> Q.even(expr), Q.odd(arg) >> Q.odd(expr)]
>>> reg2(Abs(x))
{Implies(Q.even(x), Q.even(Abs(x))), Implies(Q.odd(x), Q.odd(Abs(x)))}
"""
def __init__(self):
self.singlefacts = defaultdict(frozenset)
self.multifacts = defaultdict(frozenset)
def register(self, cls):
def _(func):
self.singlefacts[cls] |= {func}
return func
return _
def multiregister(self, *classes):
def _(func):
for cls in classes:
self.multifacts[cls] |= {func}
return func
return _
def __getitem__(self, key):
ret1 = self.singlefacts[key]
for k in self.singlefacts:
if issubclass(key, k):
ret1 |= self.singlefacts[k]
ret2 = self.multifacts[key]
for k in self.multifacts:
if issubclass(key, k):
ret2 |= self.multifacts[k]
return ret1, ret2
def __call__(self, expr):
ret = set()
handlers1, handlers2 = self[expr.func]
for h in handlers1:
ret.add(h(expr))
for h in handlers2:
ret.update(h(expr))
return ret
class_fact_registry = ClassFactRegistry()
### Class fact registration ###
x = Symbol('x')
## Abs ##
@class_fact_registry.multiregister(Abs)
def _(expr):
arg = expr.args[0]
return [Q.nonnegative(expr),
Equivalent(~Q.zero(arg), ~Q.zero(expr)),
Q.even(arg) >> Q.even(expr),
Q.odd(arg) >> Q.odd(expr),
Q.integer(arg) >> Q.integer(expr),
]
### Add ##
@class_fact_registry.multiregister(Add)
def _(expr):
return [allarg(x, Q.positive(x), expr) >> Q.positive(expr),
allarg(x, Q.negative(x), expr) >> Q.negative(expr),
allarg(x, Q.real(x), expr) >> Q.real(expr),
allarg(x, Q.rational(x), expr) >> Q.rational(expr),
allarg(x, Q.integer(x), expr) >> Q.integer(expr),
exactlyonearg(x, ~Q.integer(x), expr) >> ~Q.integer(expr),
]
@class_fact_registry.register(Add)
def _(expr):
allarg_real = allarg(x, Q.real(x), expr)
onearg_irrational = exactlyonearg(x, Q.irrational(x), expr)
return Implies(allarg_real, Implies(onearg_irrational, Q.irrational(expr)))
### Mul ###
@class_fact_registry.multiregister(Mul)
def _(expr):
return [Equivalent(Q.zero(expr), anyarg(x, Q.zero(x), expr)),
allarg(x, Q.positive(x), expr) >> Q.positive(expr),
allarg(x, Q.real(x), expr) >> Q.real(expr),
allarg(x, Q.rational(x), expr) >> Q.rational(expr),
allarg(x, Q.integer(x), expr) >> Q.integer(expr),
exactlyonearg(x, ~Q.rational(x), expr) >> ~Q.integer(expr),
allarg(x, Q.commutative(x), expr) >> Q.commutative(expr),
]
@class_fact_registry.register(Mul)
def _(expr):
# Implicitly assumes Mul has more than one arg
# Would be allarg(x, Q.prime(x) | Q.composite(x)) except 1 is composite
# More advanced prime assumptions will require inequalities, as 1 provides
# a corner case.
allarg_prime = allarg(x, Q.prime(x), expr)
return Implies(allarg_prime, ~Q.prime(expr))
@class_fact_registry.register(Mul)
def _(expr):
# General Case: Odd number of imaginary args implies mul is imaginary(To be implemented)
allarg_imag_or_real = allarg(x, Q.imaginary(x) | Q.real(x), expr)
onearg_imaginary = exactlyonearg(x, Q.imaginary(x), expr)
return Implies(allarg_imag_or_real, Implies(onearg_imaginary, Q.imaginary(expr)))
@class_fact_registry.register(Mul)
def _(expr):
allarg_real = allarg(x, Q.real(x), expr)
onearg_irrational = exactlyonearg(x, Q.irrational(x), expr)
return Implies(allarg_real, Implies(onearg_irrational, Q.irrational(expr)))
@class_fact_registry.register(Mul)
def _(expr):
# Including the integer qualification means we don't need to add any facts
# for odd, since the assumptions already know that every integer is
# exactly one of even or odd.
allarg_integer = allarg(x, Q.integer(x), expr)
anyarg_even = anyarg(x, Q.even(x), expr)
return Implies(allarg_integer, Equivalent(anyarg_even, Q.even(expr)))
### MatMul ###
@class_fact_registry.register(MatMul)
def _(expr):
allarg_square = allarg(x, Q.square(x), expr)
allarg_invertible = allarg(x, Q.invertible(x), expr)
return Implies(allarg_square, Equivalent(Q.invertible(expr), allarg_invertible))
### Pow ###
@class_fact_registry.multiregister(Pow)
def _(expr):
base, exp = expr.base, expr.exp
return [
(Q.real(base) & Q.even(exp) & Q.nonnegative(exp)) >> Q.nonnegative(expr),
(Q.nonnegative(base) & Q.odd(exp) & Q.nonnegative(exp)) >> Q.nonnegative(expr),
(Q.nonpositive(base) & Q.odd(exp) & Q.nonnegative(exp)) >> Q.nonpositive(expr),
Equivalent(Q.zero(expr), Q.zero(base) & Q.positive(exp))
]
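# e.g. for x**2 with Q.real(x): the number registration below supplies
# Q.even(2) and (via the built-in fact rules) Q.nonnegative(2), so the first
# implication lets the SAT solver conclude Q.nonnegative(x**2).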
### Numbers ###
_old_assump_getters = {
Q.positive: lambda o: o.is_positive,
Q.zero: lambda o: o.is_zero,
Q.negative: lambda o: o.is_negative,
Q.rational: lambda o: o.is_rational,
Q.irrational: lambda o: o.is_irrational,
Q.even: lambda o: o.is_even,
Q.odd: lambda o: o.is_odd,
Q.imaginary: lambda o: o.is_imaginary,
Q.prime: lambda o: o.is_prime,
Q.composite: lambda o: o.is_composite,
}
@class_fact_registry.multiregister(Number, NumberSymbol, ImaginaryUnit)
def _(expr):
ret = []
for p, getter in _old_assump_getters.items():
pred = p(expr)
prop = getter(expr)
if prop is not None:
ret.append(Equivalent(pred, prop))
return ret
| 29.015385
| 92
| 0.621103
|
94d3161ea13718a6932dd7c7562c21b189c6992d
| 1,563
|
py
|
Python
|
module/module_load.py
|
hanbule/CPBypass2
|
b77dad8169679447678611dfe50788233d4115cf
|
[
"Apache-1.1"
] | 110
|
2021-03-06T14:24:14.000Z
|
2022-03-20T19:38:13.000Z
|
module/module_load.py
|
cashtony/CPBypass2
|
db336a4f79868261bd5e8e8835f2b858472708d5
|
[
"Apache-1.1"
] | 6
|
2021-03-08T10:21:39.000Z
|
2021-05-28T10:16:44.000Z
|
module/module_load.py
|
cashtony/CPBypass2
|
db336a4f79868261bd5e8e8835f2b858472708d5
|
[
"Apache-1.1"
] | 24
|
2021-03-06T14:24:51.000Z
|
2022-03-11T14:24:19.000Z
|
#
# Copyright (c) 2019-2020 checkra1n team
# This file is part of pongoOS.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import struct
import sys
data = open(sys.argv[1], "rb").read()
import usb.core
dev = usb.core.find(idVendor=0x05ac, idProduct=0x4141)
if dev is None:
raise ValueError('Device not found')
dev.set_configuration()
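# The control transfers below follow pongoOS's USB upload protocol (request
# meanings inferred from context, so treat this as an assumption): request 2
# appears to reset any pending upload, request 1 announces the payload length,
# the bulk write on endpoint 2 carries the module itself, and request 3
# submits a shell command to run.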
dev.ctrl_transfer(0x21, 2, 0, 0, 0)
dev.ctrl_transfer(0x21, 1, 0, 0, struct.pack('I', len(data)))
dev.write(2,data,100000)
if len(data) % 512 == 0:
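    # payload length is a multiple of the USB max packet size: send a
    # zero-length packet so the device knows the bulk transfer is complete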
    dev.write(2, b"")
dev.ctrl_transfer(0x21, 3, 0, 0, b"modload\n")
| 41.131579
| 80
| 0.752399
|
14c7f392ef4dbbfc9876d16d10c726097f4d9d73
| 14,161
|
py
|
Python
|
test/unit/test_deps.py
|
donaldrauscher/dbt
|
73d0308e3570b25a7a8c8d32fbcdaaf813c94179
|
[
"Apache-2.0"
] | 1
|
2019-10-18T01:16:33.000Z
|
2019-10-18T01:16:33.000Z
|
test/unit/test_deps.py
|
donaldrauscher/dbt
|
73d0308e3570b25a7a8c8d32fbcdaaf813c94179
|
[
"Apache-2.0"
] | null | null | null |
test/unit/test_deps.py
|
donaldrauscher/dbt
|
73d0308e3570b25a7a8c8d32fbcdaaf813c94179
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from unittest import mock
import dbt.exceptions
from dbt.task.deps import (
GitUnpinnedPackage, LocalUnpinnedPackage, RegistryUnpinnedPackage,
LocalPackageContract, GitPackageContract, RegistryPackageContract,
resolve_packages
)
from dbt.contracts.project import PackageConfig
from dbt.semver import VersionSpecifier
from hologram import ValidationError
class TestLocalPackage(unittest.TestCase):
def test_init(self):
a_contract = LocalPackageContract.from_dict({'local': '/path/to/package'})
self.assertEqual(a_contract.local, '/path/to/package')
a = LocalUnpinnedPackage.from_contract(a_contract)
self.assertEqual(a.local, '/path/to/package')
a_pinned = a.resolved()
self.assertEqual(a_pinned.local, '/path/to/package')
self.assertEqual(str(a_pinned), '/path/to/package')
class TestGitPackage(unittest.TestCase):
def test_init(self):
a_contract = GitPackageContract.from_dict(
{'git': 'http://example.com', 'revision': '0.0.1'}
)
self.assertEqual(a_contract.git, 'http://example.com')
self.assertEqual(a_contract.revision, '0.0.1')
self.assertIs(a_contract.warn_unpinned, None)
a = GitUnpinnedPackage.from_contract(a_contract)
self.assertEqual(a.git, 'http://example.com')
self.assertEqual(a.revisions, ['0.0.1'])
self.assertIs(a.warn_unpinned, True)
a_pinned = a.resolved()
self.assertEqual(a_pinned.name, 'http://example.com')
self.assertEqual(a_pinned.get_version(), '0.0.1')
self.assertEqual(a_pinned.source_type(), 'git')
self.assertIs(a_pinned.warn_unpinned, True)
def test_invalid(self):
with self.assertRaises(ValidationError):
GitPackageContract.from_dict(
{'git': 'http://example.com', 'version': '0.0.1'}
)
def test_resolve_ok(self):
a_contract = GitPackageContract.from_dict(
{'git': 'http://example.com', 'revision': '0.0.1'}
)
b_contract = GitPackageContract.from_dict(
{'git': 'http://example.com', 'revision': '0.0.1',
'warn-unpinned': False}
)
a = GitUnpinnedPackage.from_contract(a_contract)
b = GitUnpinnedPackage.from_contract(b_contract)
self.assertTrue(a.warn_unpinned)
self.assertFalse(b.warn_unpinned)
c = a.incorporate(b)
c_pinned = c.resolved()
self.assertEqual(c_pinned.name, 'http://example.com')
self.assertEqual(c_pinned.get_version(), '0.0.1')
self.assertEqual(c_pinned.source_type(), 'git')
self.assertFalse(c_pinned.warn_unpinned)
def test_resolve_fail(self):
a_contract = GitPackageContract.from_dict(
{'git': 'http://example.com', 'revision': '0.0.1'}
)
b_contract = GitPackageContract.from_dict(
{'git': 'http://example.com', 'revision': '0.0.2'}
)
a = GitUnpinnedPackage.from_contract(a_contract)
b = GitUnpinnedPackage.from_contract(b_contract)
c = a.incorporate(b)
self.assertEqual(c.git, 'http://example.com')
self.assertEqual(c.revisions, ['0.0.1', '0.0.2'])
with self.assertRaises(dbt.exceptions.DependencyException):
c.resolved()
def test_default_revision(self):
a_contract = GitPackageContract.from_dict({'git': 'http://example.com'})
self.assertEqual(a_contract.revision, None)
self.assertIs(a_contract.warn_unpinned, None)
a = GitUnpinnedPackage.from_contract(a_contract)
self.assertEqual(a.git, 'http://example.com')
self.assertEqual(a.revisions, [])
self.assertIs(a.warn_unpinned, True)
a_pinned = a.resolved()
self.assertEqual(a_pinned.name, 'http://example.com')
self.assertEqual(a_pinned.get_version(), 'master')
self.assertEqual(a_pinned.source_type(), 'git')
self.assertIs(a_pinned.warn_unpinned, True)
class TestHubPackage(unittest.TestCase):
def setUp(self):
self.patcher = mock.patch('dbt.task.deps.registry')
self.registry = self.patcher.start()
self.index_cached = self.registry.index_cached
self.get_available_versions = self.registry.get_available_versions
self.package_version = self.registry.package_version
self.index_cached.return_value = [
'fishtown-analytics-test/a',
]
self.get_available_versions.return_value = [
'0.1.2', '0.1.3'
]
self.package_version.return_value = {
'id': 'fishtown-analytics-test/a/0.1.2',
'name': 'a',
'version': '0.1.2',
'packages': [],
'_source': {
'blahblah': 'asdfas',
},
'downloads': {
'tarball': 'https://example.com/invalid-url!',
'extra': 'field',
},
'newfield': ['another', 'value'],
}
def tearDown(self):
self.patcher.stop()
def test_init(self):
a_contract = RegistryPackageContract(
package='fishtown-analytics-test/a',
version='0.1.2',
)
self.assertEqual(a_contract.package, 'fishtown-analytics-test/a')
self.assertEqual(a_contract.version, '0.1.2')
a = RegistryUnpinnedPackage.from_contract(a_contract)
self.assertEqual(a.package, 'fishtown-analytics-test/a')
self.assertEqual(
a.versions,
[VersionSpecifier(
build=None,
major='0',
matcher='=',
minor='1',
patch='2',
prerelease=None
)]
)
a_pinned = a.resolved()
self.assertEqual(a_contract.package, 'fishtown-analytics-test/a')
self.assertEqual(a_contract.version, '0.1.2')
self.assertEqual(a_pinned.source_type(), 'hub')
def test_invalid(self):
with self.assertRaises(ValidationError):
RegistryPackageContract.from_dict(
{'package': 'namespace/name', 'key': 'invalid'}
)
def test_resolve_ok(self):
a_contract = RegistryPackageContract(
package='fishtown-analytics-test/a',
version='0.1.2'
)
b_contract = RegistryPackageContract(
package='fishtown-analytics-test/a',
version='0.1.2'
)
a = RegistryUnpinnedPackage.from_contract(a_contract)
b = RegistryUnpinnedPackage.from_contract(b_contract)
c = a.incorporate(b)
self.assertEqual(c.package, 'fishtown-analytics-test/a')
self.assertEqual(
c.versions,
[
VersionSpecifier(
build=None,
major='0',
matcher='=',
minor='1',
patch='2',
prerelease=None,
),
VersionSpecifier(
build=None,
major='0',
matcher='=',
minor='1',
patch='2',
prerelease=None,
),
]
)
c_pinned = c.resolved()
self.assertEqual(c_pinned.package, 'fishtown-analytics-test/a')
self.assertEqual(c_pinned.version, '0.1.2')
self.assertEqual(c_pinned.source_type(), 'hub')
def test_resolve_missing_package(self):
a = RegistryUnpinnedPackage.from_contract(RegistryPackageContract(
package='fishtown-analytics-test/b',
version='0.1.2'
))
with self.assertRaises(dbt.exceptions.DependencyException) as exc:
a.resolved()
msg = 'Package fishtown-analytics-test/b was not found in the package index'
self.assertEqual(msg, str(exc.exception))
def test_resolve_missing_version(self):
a = RegistryUnpinnedPackage.from_contract(RegistryPackageContract(
package='fishtown-analytics-test/a',
version='0.1.4'
))
with self.assertRaises(dbt.exceptions.DependencyException) as exc:
a.resolved()
msg = (
"Could not find a matching version for package "
"fishtown-analytics-test/a\n Requested range: =0.1.4, =0.1.4\n "
"Available versions: ['0.1.2', '0.1.3']"
)
self.assertEqual(msg, str(exc.exception))
def test_resolve_conflict(self):
a_contract = RegistryPackageContract(
package='fishtown-analytics-test/a',
version='0.1.2'
)
b_contract = RegistryPackageContract(
package='fishtown-analytics-test/a',
version='0.1.3'
)
a = RegistryUnpinnedPackage.from_contract(a_contract)
b = RegistryUnpinnedPackage.from_contract(b_contract)
c = a.incorporate(b)
with self.assertRaises(dbt.exceptions.DependencyException) as exc:
c.resolved()
msg = (
"Version error for package fishtown-analytics-test/a: Could not "
"find a satisfactory version from options: ['=0.1.2', '=0.1.3']"
)
self.assertEqual(msg, str(exc.exception))
def test_resolve_ranges(self):
a_contract = RegistryPackageContract(
package='fishtown-analytics-test/a',
version='0.1.2'
)
b_contract = RegistryPackageContract(
package='fishtown-analytics-test/a',
version='<0.1.4'
)
a = RegistryUnpinnedPackage.from_contract(a_contract)
b = RegistryUnpinnedPackage.from_contract(b_contract)
c = a.incorporate(b)
self.assertEqual(c.package, 'fishtown-analytics-test/a')
self.assertEqual(
c.versions,
[
VersionSpecifier(
build=None,
major='0',
matcher='=',
minor='1',
patch='2',
prerelease=None,
),
VersionSpecifier(
build=None,
major='0',
matcher='<',
minor='1',
patch='4',
prerelease=None,
),
]
)
c_pinned = c.resolved()
self.assertEqual(c_pinned.package, 'fishtown-analytics-test/a')
self.assertEqual(c_pinned.version, '0.1.2')
self.assertEqual(c_pinned.source_type(), 'hub')
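# MockRegistry stands in for the dbt hub client in TestPackageSpec below:
# index_cached lists the known package names, get_available_versions lists
# the versions of one package, and package_version returns the stored
# metadata dict (or None when missing, mirroring the real registry module).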
class MockRegistry:
def __init__(self, packages):
self.packages = packages
def index_cached(self, registry_base_url=None):
return sorted(self.packages)
def get_available_versions(self, name):
try:
pkg = self.packages[name]
except KeyError:
return []
return list(pkg)
def package_version(self, name, version):
try:
return self.packages[name][version]
except KeyError:
return None
class TestPackageSpec(unittest.TestCase):
def setUp(self):
self.patcher = mock.patch('dbt.task.deps.registry')
self.registry = self.patcher.start()
self.mock_registry = MockRegistry(packages={
'fishtown-analytics-test/a': {
'0.1.2': {
'id': 'fishtown-analytics-test/a/0.1.2',
'name': 'a',
'version': '0.1.2',
'packages': [],
'_source': {
'blahblah': 'asdfas',
},
'downloads': {
'tarball': 'https://example.com/invalid-url!',
'extra': 'field',
},
'newfield': ['another', 'value'],
},
'0.1.3': {
'id': 'fishtown-analytics-test/a/0.1.3',
'name': 'a',
'version': '0.1.3',
'packages': [],
'_source': {
'blahblah': 'asdfas',
},
'downloads': {
'tarball': 'https://example.com/invalid-url!',
'extra': 'field',
},
'newfield': ['another', 'value'],
}
},
'fishtown-analytics-test/b': {
'0.2.1': {
'id': 'fishtown-analytics-test/b/0.2.1',
'name': 'b',
'version': '0.2.1',
'packages': [{'package': 'fishtown-analytics-test/a', 'version': '>=0.1.3'}],
'_source': {
'blahblah': 'asdfas',
},
'downloads': {
'tarball': 'https://example.com/invalid-url!',
'extra': 'field',
},
'newfield': ['another', 'value'],
},
}
})
self.registry.index_cached.side_effect = self.mock_registry.index_cached
self.registry.get_available_versions.side_effect = self.mock_registry.get_available_versions
self.registry.package_version.side_effect = self.mock_registry.package_version
def tearDown(self):
self.patcher.stop()
def test_dependency_resolution(self):
package_config = PackageConfig.from_dict({
'packages': [
{'package': 'fishtown-analytics-test/a', 'version': '>0.1.2'},
{'package': 'fishtown-analytics-test/b', 'version': '0.2.1'},
],
})
resolved = resolve_packages(package_config.packages, None)
self.assertEqual(len(resolved), 2)
self.assertEqual(resolved[0].name, 'fishtown-analytics-test/a')
self.assertEqual(resolved[0].version, '0.1.3')
self.assertEqual(resolved[1].name, 'fishtown-analytics-test/b')
self.assertEqual(resolved[1].version, '0.2.1')
| 35.850633
| 100
| 0.547207
|
8d7b657f2c0ead05c92eaac01ef7421bf64bac4b
| 5,018
|
py
|
Python
|
melusine/prepare_email/cleaning.py
|
AntoineSimoulin/melusine
|
bd6369d446c9f2c12789ed5fe4465f2f47a74790
|
[
"Apache-2.0"
] | 1
|
2020-04-09T17:08:04.000Z
|
2020-04-09T17:08:04.000Z
|
melusine/prepare_email/cleaning.py
|
ibrahim85/melusine
|
bfd0ad7ee9968c247e86ef7ec15ccf363d212141
|
[
"Apache-2.0"
] | null | null | null |
melusine/prepare_email/cleaning.py
|
ibrahim85/melusine
|
bfd0ad7ee9968c247e86ef7ec15ccf363d212141
|
[
"Apache-2.0"
] | null | null | null |
"""
Cleaning of the body and the header
"""
import unidecode, unicodedata
import re
from melusine.config import ConfigJsonReader
conf_reader = ConfigJsonReader()
config = conf_reader.get_config_file()
REGEX_CLEAN = config["regex"]['cleaning']
regex_flags_dict = REGEX_CLEAN["flags_dict"]
regex_clean_header_dict = REGEX_CLEAN["clean_header_dict"]
regex_remove_multiple_spaces_list = REGEX_CLEAN["remove_multiple_spaces_list"]
def clean_body(row, flags=True):
"""Clean body column. The cleaning involves the following operations:
- Cleaning the text
- Removing the multiple spaces
- Flagging specific items (postal code, phone number, date...)
Parameters
----------
row : row of pandas.Dataframe object,
Data contains 'last_body' column.
flags : boolean, optional
True if you want to flag relevant info, False if not.
Default value, True.
Returns
-------
row of pandas.DataFrame object or pandas.Series if apply to all DF.
"""
text = str(row["last_body"])
clean_body = clean_text(text)
clean_body = flag_items(clean_body, flags=flags)
return clean_body
def clean_header(row, flags=True):
"""Clean the header column. The cleaning involves the following operations:
    - Removing the transfer and reply indicators
- Cleaning the text
- Flagging specific items (postal code, phone number, date...)
Parameters
----------
row : row of pandas.Dataframe object,
Data contains 'header' column.
flags : boolean, optional
True if you want to flag relevant info, False if not.
Default value, True.
Returns
-------
row of pd.DataFrame object or pandas.Series if apply to all DF.
"""
text = str(row["header"])
clean_header = remove_transfer_answer_header(text)
clean_header = clean_text(clean_header)
clean_header = flag_items(clean_header, flags=flags)
return clean_header
def clean_text(text):
"""Clean a string. The cleaning involves the following operations:
- Putting all letters to lowercase
- Removing all the accents
- Removing all line breaks
- Removing all symbols and punctuations
- Removing the multiple spaces
Parameters
----------
text : str
Returns
-------
str
"""
text = text_to_lowercase(text)
text = remove_accents(text)
text = remove_line_break(text)
text = remove_superior_symbol(text)
# text = remove_apostrophe(text)
text = remove_multiple_spaces_and_strip_text(text)
return text
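# Illustrative example (hypothetical input; the exact whitespace collapsing
# depends on the regexes configured in regex_remove_multiple_spaces_list):
#   clean_text("Très   <URGENT> Réponse") -> "tres urgent reponse"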
def text_to_lowercase(text):
"""Set all letters to lowercase"""
return text.lower()
def remove_accents(text, use_unidecode=False):
"""
Remove accents from text
Using unidecode is more powerful but much more time consuming
    Example: the joined 'ae' character is converted to 'a' + 'e' by unidecode, while it is dropped by unicodedata.
"""
if use_unidecode:
return unidecode.unidecode(text)
else:
utf8_str = unicodedata.normalize('NFKD', text).encode('ASCII', 'ignore').decode("utf-8")
return utf8_str
def remove_line_break(text):
"""Remove line breaks from text"""
return text.replace('\n', '')
def remove_superior_symbol(text):
"""Remove superior and inferior symbols from text"""
text = text.replace('>', '')
text = text.replace('<', '')
return text
def remove_apostrophe(text):
"""Remove apostrophes from text"""
return text.replace('\'', ' ')
def remove_multiple_spaces_and_strip_text(text):
"""Remove multiple spaces, strip text, and remove '-', '*' characters.
Parameters
----------
text : str,
Header content.
Returns
-------
str
"""
for regex_remove_multiple_spaces in regex_remove_multiple_spaces_list:
text = re.sub(regex_remove_multiple_spaces, ' ', text)
text = text.strip()
return text
def flag_items(text, flags=True):
"""Flag relevant information
    e.g. amount, phone number, email address, postal code (5 digits), etc.
Parameters
----------
text : str,
Body content.
flags : boolean, optional
True if you want to flag relevant info, False if not.
Default value, True.
Returns
-------
str
"""
if flags:
for regex, value in regex_flags_dict.items():
text = re.sub(pattern=regex, repl=value,
string=text, flags=re.IGNORECASE)
return text
else:
return text
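# Illustrative example (hypothetical pattern; the real patterns come from
# regex_flags_dict in the config): with a phone-number regex mapped to the
# replacement " flag_phone_ ", flag_items("rappelez le 0612345678") would
# return "rappelez le  flag_phone_ ".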
def remove_transfer_answer_header(text):
"""Remove historic and transfers indicators in the header.
Ex: "Tr:", "Re:", "Fwd", etc.
Parameters
----------
text : str,
Header content.
Returns
-------
str
"""
for regex, value in regex_clean_header_dict.items():
text = re.sub(pattern=regex, repl=value,
string=text, flags=re.IGNORECASE)
return text
| 26
| 116
| 0.645476
|
34dc60c6ce29961e8308d5bc55714578eaccdfe0
| 36
|
py
|
Python
|
app/VertebraSegmentation/net/data/__init__.py
|
z0978916348/Localization_and_Segmentation
|
1a80d9730dad7ede8c3b92793e85a979915a2fad
|
[
"Unlicense",
"MIT"
] | 6
|
2020-09-17T04:17:32.000Z
|
2021-09-02T03:40:10.000Z
|
app/VertebraSegmentation/net/data/__init__.py
|
z0978916348/Localization_and_Segmentation
|
1a80d9730dad7ede8c3b92793e85a979915a2fad
|
[
"Unlicense",
"MIT"
] | 1
|
2021-01-25T06:25:14.000Z
|
2021-02-25T02:33:49.000Z
|
app/VertebraSegmentation/net/data/__init__.py
|
z0978916348/Localization_and_Segmentation
|
1a80d9730dad7ede8c3b92793e85a979915a2fad
|
[
"Unlicense",
"MIT"
] | null | null | null |
from .dataset import VertebraDataset
| 36
| 36
| 0.888889
|
bca437f35cc417538a99c8da65ef38d69f87c64e
| 27,563
|
py
|
Python
|
mr_uplift/mr_uplift.py
|
Ibotta/mr_uplift
|
e1facd39a87683dfdeaf7b08336e0ce781ff87cf
|
[
"Apache-2.0"
] | 48
|
2020-04-22T16:57:55.000Z
|
2022-02-02T00:21:13.000Z
|
mr_uplift/mr_uplift.py
|
Ibotta/mr_uplift
|
e1facd39a87683dfdeaf7b08336e0ce781ff87cf
|
[
"Apache-2.0"
] | 6
|
2020-05-01T18:15:22.000Z
|
2022-02-21T07:26:18.000Z
|
mr_uplift/mr_uplift.py
|
Ibotta/mr_uplift
|
e1facd39a87683dfdeaf7b08336e0ce781ff87cf
|
[
"Apache-2.0"
] | 4
|
2020-04-25T08:41:34.000Z
|
2022-01-08T11:21:23.000Z
|
import numpy as np
import pandas as pd
import dill
import copy
from tensorflow.keras.models import load_model
from mr_uplift.keras_model_functionality import train_model_multi_output_w_tmt, gridsearch_mo_optim, get_random_weights, treatments_to_text
from mr_uplift.erupt import get_erupts_curves_aupc, get_best_tmts, erupt, get_weights, softmax
from tensorflow.keras.wrappers.scikit_learn import KerasRegressor
from sklearn.model_selection import GridSearchCV
from sklearn.preprocessing import StandardScaler, KBinsDiscretizer, FunctionTransformer
from sklearn.decomposition import PCA
from sklearn.model_selection import train_test_split
from mr_uplift.calibrate_uplift import UpliftCalibration
from sklearn.pipeline import make_pipeline
from sklearn.ensemble import RandomForestClassifier
def get_t_data(values, num_obs):
"""Repeats treatment to several rows and reshapes it. Used to append treatments
to explanatory variable dataframe to predict counterfactuals.
Args:
values (array): treatments to predict
Returns:
repeated treatment values
"""
if isinstance(values, np.ndarray):
len_values = len(values)
else:
len_values = 1
values = np.array(values)
values_rep = np.full((num_obs, len_values), values.reshape(1, len_values))
return values_rep
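# Illustrative example:
#   get_t_data(np.array([0., 1.]), 3)
#   -> array([[0., 1.],
#             [0., 1.],
#             [0., 1.]])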
def reduce_concat(x):
"""concatenates object into one string
Args:
x (array): values to concatenate into one string
Returns:
string of objects
"""
return np.array(['_'.join(map(str, q)) for q in x])
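# Illustrative example:
#   reduce_concat(np.array([[1, 0], [0, 1]])) -> array(['1_0', '0_1'])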
class MRUplift(object):
def __init__(self, **kw):
"""I'm putting this in here because I think its necessary for .copy()
function later. Not sure if thats the case.
"""
self.kw = kw
self.__dict__.update(kw)
def fit(self, x, y, t, test_size=0.7, random_state=22, param_grid=None,
n_jobs=-1, cv=5, optimized_loss = False, PCA_x = False, PCA_y = False, bin = False,
use_propensity = False, propensity_score_cutoff = 100):
"""Fits a Neural Network Model of the form y ~ f(t,x). Creates seperate
transformers for y, t, and x and scales each. Assigns train / test split.
Args:
x (np array or pd.dataframe): Explanatory Data of shape num_observations
by num_explanatory_variables
y (np array or pd.dataframe): Response Variables of shape
num_observations by num_response_variables
t (np array or pd.dataframe): Treatment Variables of shape
num_observations by num_treatment columns
test_size (float): Percentage of observations to be in test set
random_state (int): Random state for train test split. This is used in other parts
of class.
param_grid (dict): Parameters of keras model to gridsearch over.
n_jobs (int): number of cores to run on.
            cv (int): number of cross-validation folds
            optimized_loss (boolean): If True will use the optimized loss functionality.
PCA_x (boolean): If True it will use PCA to preprocess explanatory variables
PCA_y (boolean): If True it will use PCA to preprocess response variables
use_propensity (boolean): If True will use propensity scores from a RF. Best for observational data.
            propensity_score_cutoff (float): maximum propensity-score weight. Observations
                above this cutoff had little support under the original treatment
                assignment and should probably be excluded.
Returns:
Builds a neural network and assigns it to self.model
"""
self.unique_t = np.unique(np.array(t).astype('float'), axis=0)
self.num_t = len(np.unique(t))
self.num_responses = y.shape[1]
self.x = np.array(x).astype('float')
self.y = np.array(y).astype('float')
self.t = np.array(t).astype('float')
self.test_size = test_size
self.random_state = random_state
self.propensity_score_cutoff = propensity_score_cutoff
if isinstance(x, pd.DataFrame):
self.x_names = x.columns.values
else:
self.x_names = None
if isinstance(y, pd.DataFrame):
self.y_names = y.columns.values
else:
self.y_names = None
if isinstance(t, pd.DataFrame):
self.t_names = t.columns.values
else:
self.t_names = None
        # a test_size of zero means no holdout set; train on all of the data
if self.test_size == 0:
x_train = x
y_train = y
t_train = t
else:
x_train, x_test, y_train, y_test, t_train, t_test = train_test_split(
self.x, self.y, self.t, test_size=self.test_size,
random_state=self.random_state)
        # I've found that scaling features helps when estimating nets.
        # Using a PCA of the feature space should also help.
if PCA_y:
self.y_ss = PCA(whiten = True)
else:
self.y_ss = StandardScaler()
if PCA_x:
self.x_ss = PCA(whiten = True)
elif bin:
self.x_ss = make_pipeline(
KBinsDiscretizer(n_bins = 10),
FunctionTransformer(lambda x: x.todense(), accept_sparse=True)
)
else:
self.x_ss = StandardScaler()
self.t_ss = StandardScaler()
self.y_ss.fit(y_train)
self.x_ss.fit(x_train)
self.t_ss.fit(t_train)
y_train_scaled = self.y_ss.transform(y_train)
x_train_scaled = self.x_ss.transform(x_train)
t_train_scaled = self.t_ss.transform(t_train)
x_t_train = np.concatenate([t_train_scaled, x_train_scaled], axis=1)
str_t_train, str_unique_treatments = treatments_to_text(t_train.astype('float'), self.unique_t)
if optimized_loss:
if use_propensity:
param_grid_propensity = {
'n_estimators': [500],
'max_features': ['auto'],
'max_depth': [1,2,4,8],
'oob_score' : [True],
'n_jobs' : [-1]
}
propensity_model = GridSearchCV(estimator=RandomForestClassifier(max_depth = 8, n_jobs = 1, oob_score = True, n_estimators = 500),
param_grid=param_grid_propensity, cv=3, scoring='neg_log_loss')
propensity_model.fit(x_train_scaled, str_t_train)
self.propensity_model = propensity_model.best_estimator_
propensity_model = propensity_model.best_estimator_
propensity_scores = pd.DataFrame(1/(propensity_model.oob_decision_function_+np.finfo(float).eps))
propensity_scores.columns = propensity_model.classes_
mask_tmt_locations = np.array((propensity_scores < self.propensity_score_cutoff)*1)
str_t_series = pd.Series(str_t_train)
observation_weights = np.array(propensity_scores.lookup(str_t_series.index, str_t_series.values)).reshape(-1,1)
else:
self.propensity_model = None
mask_tmt_locations = np.ones(t_train.shape[0]*len(self.unique_t)).reshape(t_train.shape[0], len(self.unique_t))
observation_weights = get_weights(str_t_train)
keep_locs_on_observations = np.where(observation_weights > self.propensity_score_cutoff/(self.propensity_score_cutoff-1))[0]
net = gridsearch_mo_optim(x_train_scaled[keep_locs_on_observations],
y_train_scaled[keep_locs_on_observations],
t_train_scaled[keep_locs_on_observations],
param_grid=param_grid,
n_splits=cv,
observation_weights=observation_weights[keep_locs_on_observations],
mask_tmt_locations=mask_tmt_locations[keep_locs_on_observations])
self.best_score_net = net[2]
self.best_params_net = net[1]
#only need embedded layer and not whole net
self.model = net[0].get_layer('net_model')
else:
net = train_model_multi_output_w_tmt(x_t_train, y_train_scaled,
param_grid=param_grid,
n_jobs=n_jobs,
cv=cv)
self.best_score_net = net.best_score_
self.best_params_net = net.best_params_
self.model = net.best_estimator_.model
self.propensity_model = None
def predict(self, x, t, response_transformer = False):
"""Returns predictions of the fitted model. Transforms both x and t then
concatenates those two variables into an array to predict on. Finally,
an inverse transformer is applied on predictions to transform to original
response means and standard deviations.
Args:
x (np array or pd.dataframe): Explanatory Data of shape num_observations
by num_explanatory_variables
t (np array or pd.dataframe): Treatment Variables of shape
num_observations by num_treatment columns
response_transformer (boolean): If true will use the trained scaler to transform
                responses. I've noticed that using this in production degrades
                performance because the model optimizes on scaled data.
Returns:
Predictions fitted model
"""
x_t_new = np.concatenate(
[self.t_ss.transform(t), self.x_ss.transform(x)], axis=1)
preds = self.model.predict(x_t_new)
if response_transformer:
preds = self.y_ss.inverse_transform(preds)
return preds
def predict_ice(self, x=None, treatments=None, calibrator=False,
response_transformer = False):
"""Predicts all counterfactuals with new data. If no new data is
assigned it will use test set data. Can subset to particular treatments
using treatment assignment. Can also apply calibrator function (experimental)
to predictions.
The 'ice' refers to Individual Conditional Expectations. A better
description of this can be found here:
https://arxiv.org/pdf/1309.6392.pdf
Args:
x (np.array): new data to predict. Will use test data if not given
treatments (np.array): Treatments to predict on. If none assigned then
original training treatments are used.
calibrator (boolean): If true will use the trained calibrator to transform
responses. Otherwise will use the response inverse transformer
response_transformer (boolean): If true will use the trained scaler to transform
                responses. I've noticed that using this in production degrades
                performance because the model optimizes on scaled data.
Returns:
Counterfactuals for all treatments and response variables. an arrary of
num_treatments by num_observations by num_responses
"""
if x is None:
x_train, x_test, y_train, y_test, t_train, t_test = train_test_split(
self.x, self.y, self.t, test_size=self.test_size,
random_state=self.random_state)
else:
x_test = x
if treatments is None:
treatments = self.unique_t
ice = np.array([self.predict(x_test, get_t_data(
t, x_test.shape[0]), response_transformer = response_transformer) for t in treatments])
if calibrator:
ice = self.calibrator.transform(ice)
return ice
def get_erupt_curves(self, x=None, y=None, t=None, objective_weights=None,
treatments=None, calibrator=False,
response_transformer = False,
propensity_score_cutoff = 100):
"""Returns ERUPT Curves and distributions of treatments. If either x or
y or t is not inputted it will use test data.
If there is only one response variable then it will assume we want to maximize the response.
It will calculate and return the ERUPT metric and distribution of treatments.
An introduction to ERUPT metric can be found here https://medium.com/building-ibotta/erupt-expected-response-under-proposed-treatments-ff7dd45c84b4
        If there are multiple responses it will use objective_weights to create a weighted sum of response
        variables. It will then use this new weighted sum to determine optimal treatments and calculate ERUPT
        metrics accordingly. Repeatedly doing this with different weights yields estimates of the tradeoffs.
ERUPT curves are described in more detail here:
https://medium.com/building-ibotta/estimating-and-visualizing-business-tradeoffs-in-uplift-models-80ff845a5698
Args:
x (np.array): new data to predict. Will use test data if not given
y (np.array): responses
t (np.array): treatments
objective_weights (np.array): of dim (num_weights by num_response_variables).
if none is assigned it will trade off costs of first two response variables and
assume that first column is to be maximized and second column is to be minimized
treatments (np.array): treatments to use in erupt calculations
calibrator (boolean): If true will use the trained calibrator to transform
responses. Otherwise will use the response inverse transformer
response_transformer (boolean): If true will use the trained scaler to transform
                responses. I've noticed that using this in production degrades
                performance because the model optimizes on scaled data.
Returns:
ERUPT Curves and Treatment Distributions
"""
if self.num_responses == 1:
objective_weights = np.array([1]).reshape(1,-1)
if objective_weights is None:
objective_weights = np.zeros((11, self.num_responses))
objective_weights[:, 0] = np.linspace(0.0, 1.0, num=11)
objective_weights[:, 1] = -np.linspace(1.0, 0.0, num=11)
if any([q is None for q in [x, y, t] ]):
print("Using Test Data Set")
x_train, x, y_train, y, t_train, t = train_test_split(
self.x, self.y, self.t, test_size=self.test_size,
random_state=self.random_state)
if treatments is None:
treatments = self.unique_t
x = np.array(x).astype('float')
t = np.array(t).astype('float')
y = np.array(y).astype('float')
str_t, str_unique_treatments = treatments_to_text(t, treatments)
to_keep_locs = np.where([z in str_unique_treatments for z in str_t])[0]
y = y[to_keep_locs]
t = t[to_keep_locs]
x = x[to_keep_locs]
str_t = np.array(str_t)[to_keep_locs]
ice_preds = self.predict_ice(x, treatments, calibrator, response_transformer)
if self.propensity_model is not None:
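            # inverse-propensity weights: each observation is weighted by
            # 1 / P(t | x), and treatments whose weight would exceed
            # propensity_score_cutoff are masked out of the candidate set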
prob_tmts = self.propensity_model.predict_proba(self.x_ss.transform(x))+np.finfo(float).eps
propensity_scores = pd.DataFrame(1/prob_tmts)
propensity_scores.columns = self.propensity_model.classes_
propensity_scores = propensity_scores[str_unique_treatments]
mask_tmt_locations = np.array((propensity_scores < self.propensity_score_cutoff)*1)
t_series = pd.Series(str_t)
observation_weights = propensity_scores.lookup(t_series.index, t_series.values)
else:
mask_tmt_locations = np.ones(t.shape[0]*len(treatments)).reshape(t.shape[0], len(treatments))
observation_weights = get_weights(str_t)
return get_erupts_curves_aupc(
y,
np.array(str_t),
ice_preds,
str_unique_treatments,
objective_weights,
names=self.y_names,
mask_tmt_locations = mask_tmt_locations,
observation_weights = np.array(observation_weights))
def copy(self):
"""Copies MRUplift Class. Not sure if this is the best way but it
works.
"""
return copy.copy(self)
def save(self, file_location):
"""Saves MRUplift Class to location. Will save two outputs:
keras model and rest of MRUplift class.
Args:
file_location (str): File location to save data
Returns:
            Nothing. Saves the files to the location.
"""
model = self.model
uplift_class_copy = self.copy()
uplift_class_copy.model = None
dill.dump(uplift_class_copy, file = open(file_location + '/mr_uplift_class.pkl', "wb"))
model.save(file_location + '/mr_uplift_model.h5')
def load(self, file_location):
"""Loads MRUplift Class from location.
Args:
file_location (str): File location to load data
Returns:
Updated Uplift Class
"""
uplift_class = dill.load(open(file_location + '/mr_uplift_class.pkl', "rb"))
uplift_class.model = load_model(file_location + '/mr_uplift_model.h5')
self.__dict__.update(uplift_class.__dict__)
def predict_optimal_treatments(self, x, objective_weights=None, treatments=None,
calibrator=False, response_transformer = False,
use_propensity_score_cutoff = True):
"""Calculates optimal treatments of model output given explanatory
variables and weights
Args:
x (np.array): new data to predict. Will use test data if not given
objective_weights (np.array): set of weights of length num_responses to maximize.
is required for multi output decisions
treatments (np.array): Treatments to predict on. If none assigned then
original training treatments are used.
calibrator (boolean): If true will use the trained calibrator to transform
responses. Otherwise will use the response inverse transformer
response_transformer (boolean): If true will use the trained scaler to transform
                responses. I've noticed that using this in production degrades
                performance because the model optimizes on scaled data.
use_propensity_score_cutoff(boolean): If false it will not mask treatments for predictions based on propensity
scores.
Returns:
Optimal Treatment Values
"""
x = np.array(x).astype('float')
if treatments is None:
treatments = self.unique_t
if self.propensity_model is not None and use_propensity_score_cutoff:
prob_tmts = self.propensity_model.predict_proba(self.x_ss.transform(x))+np.finfo(float).eps
propensity_scores = pd.DataFrame(1/prob_tmts)
propensity_scores.columns = self.propensity_model.classes_
unique_t = reduce_concat(treatments)
propensity_scores = propensity_scores[unique_t]
mask_tmt_locations = np.array((propensity_scores < self.propensity_score_cutoff)*1)
else:
mask_tmt_locations = None
ice = self.predict_ice(x, treatments, calibrator, response_transformer = response_transformer)
if self.num_responses > 1:
best_treatments = get_best_tmts(objective_weights, ice, treatments,
mask_tmt_locations = mask_tmt_locations)
else:
if self.propensity_model is not None and use_propensity_score_cutoff:
ice = ice[:,:,0].T
ice = softmax(ice)*mask_tmt_locations
ice = ice.T
else:
ice = ice[:,:,0]
best_treatments = treatments[np.argmax(ice, axis=0)]
return best_treatments
def calibrate(self, response_transformer = False, treatments=None):
"""(Experimental)
        This fits a calibrator on the training dataset, of the form
        y = b0*y_pred_0*t_0 + b1*y_pred_1*t_1 + ... + b_num_tmts*y_pred_num_tmts*t_num_tmts
        over all treatments.
        response_transformer (boolean): If true will use the trained scaler to transform
            responses. I've noticed that using this in production degrades
            performance because the model optimizes on scaled data.
Args:
None
Returns:
None
"""
x_train, x, y_train, y, t_train, t = train_test_split(
self.x, self.y, self.t, test_size=self.test_size,
random_state=self.random_state)
if t_train.shape[1] > 1:
t_train = reduce_concat(t_train)
t_train = t_train.reshape(-1, 1)
ice = self.predict_ice(x_train, treatments, response_transformer)
calib = UpliftCalibration()
calib.fit(ice, y_train, t_train)
calib.uplift_scores()
self.calibrator = calib
def permutation_varimp(self, objective_weights=None, x=None, treatments=None, calibrator=False,
num_sample = 10000):
"""Variable importance metrics. This is based on permutation tests. For variable this permutes the column
and then predicts and finds the optimal value given a set of weights. For each user it compares the optimal treatment of
permuted column data with optimal treatment of non-permuted data and averages the result. The output is an index of how often
the permuted column disagrees with unpermuted columns.
Args:
objective_weights (np.array): set of weights of length num_responses to maximize.
is required for multi output decisions
x (np.array): new data to predict. Will use test data if not given
treatments (np.array): Treatments to predict on. If none assigned then
original training treatments are used.
calibrator (boolean): If true will use the trained calibrator to transform
responses. Otherwise will use the response inverse transformer
num_sample (int): Number of observations to sample for calculations. Used to reduce
time of function.
Returns:
df of variable importance metrics
"""
if x is None:
x_train, x, y_train, y, t_train, t = train_test_split(
self.x, self.y, self.t, test_size=self.test_size,
random_state=self.random_state)
x = np.array(x).astype('float')
t = np.array(t).astype('float')
y = np.array(y).astype('float')
#subset number of observations
x = x[:num_sample]
original_decisions = self.predict_optimal_treatments(x,
objective_weights=objective_weights, treatments=treatments,
calibrator=calibrator)
varimps = []
for p in range(x.shape[1]):
shuffled_index = np.arange(x.shape[0])
np.random.shuffle(shuffled_index)
shuffled_index = shuffled_index
x_copy = x.copy()
x_copy[:,p] = x_copy[:,p][shuffled_index]
temp_decisions = self.predict_optimal_treatments(x_copy,
objective_weights=objective_weights, treatments=treatments,
calibrator=calibrator)
temp_varimp = (original_decisions == temp_decisions).mean()
varimps.append(temp_varimp)
#make varimps a 'larger number -> more important' metric
varimps = 1 - np.array(varimps)
varimps_pd = pd.DataFrame(np.array(varimps))
varimps_pd.columns = ['permutation_varimp_metric']
if self.x_names is not None:
varimps_pd['var_names'] = self.x_names
return varimps_pd
def get_random_erupts(self, x = None, y = None, t = None, objective_weights = None,
treatments = None, calibrator = None, random_seed = 22,
response_transformer = False, propensity_score_cutoff = 100):
"""OOS metric calculation for full range of a ranom set of objective weights.
Idea is to calculate full range of objective functions. Here each observation
is assigned a random objective function and the ERUPT is calculated on this.
Args:
x (np.array): new data to predict. Will use test data if not given
y (np.array): responses
t (np.array): treatments
objective_weights (np.array): of dim (num_observations by num_response_variables).
                if none is assigned it randomly creates weights
treatments (np.array): treatments to use in erupt calculations
calibrator (boolean): If true will use the trained calibrator to transform
responses. Otherwise will use the response inverse transformer
random_seed (int): seed for random weights matrix if none are assigned
response_transformer (boolean): If true will use the trained scaler to transform
                responses. I've noticed that using this in production degrades
                performance because the model optimizes on scaled data.
Returns:
            mean and standard deviation of the ERUPT metric
"""
if treatments is None:
treatments = self.unique_t
if x is None:
x_train, x, y_train, y, t_train, t = train_test_split(
self.x, self.y, self.t, test_size=self.test_size,
random_state=self.random_state)
x = np.array(x).astype('float')
t = np.array(t).astype('float')
y = np.array(y).astype('float')
if objective_weights is None:
objective_weights = get_random_weights(y, random_seed)
str_t, str_unique_treatments = treatments_to_text(t, treatments)
str_t_series = pd.Series(str_t)
if self.propensity_model is not None:
prob_tmts = self.propensity_model.predict_proba(self.x_ss.transform(x))+np.finfo(float).eps
propensity_scores = pd.DataFrame(1/prob_tmts)
propensity_scores.columns = self.propensity_model.classes_
mask_tmt_locations = np.array((propensity_scores < propensity_score_cutoff)*1)
observation_weights = propensity_scores.lookup(str_t_series.index, str_t_series.values)
else:
mask_tmt_locations = None
observation_weights = get_weights(str_t)
tmt_effects = pd.DataFrame(y)
tmt_effects['tmt'] = str_t
tmt_effects_mean = tmt_effects.groupby('tmt').mean()
utility_by_best_global_tmt = np.array([(objective_weights*x).sum(axis=1) for x in tmt_effects_mean])
best_single_tmt = tmt_effects_mean.index.values[utility_by_best_global_tmt.argmax(axis = 0)]
utility = (objective_weights*y).sum(axis=1)
y_temp = np.concatenate([y, utility.reshape(-1,1)], axis = 1)
optim_tmt = self.predict_optimal_treatments(x,
objective_weights=objective_weights, treatments=treatments,
calibrator=calibrator, response_transformer = response_transformer)
new_y = (objective_weights*y).sum(axis = 1).reshape(-1,1)
erupt_new_y = erupt(new_y, t, optim_tmt, weights = observation_weights)
erupt_base_tmt = erupt(new_y, str_t_series, best_single_tmt, weights = observation_weights.reshape(-1,1))
erupt_new_y['type'] = 'model'
erupt_base_tmt['type'] = 'ate'
erupts = pd.concat([erupt_new_y, erupt_base_tmt])
return(erupts)
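# Illustrative end-to-end usage (hypothetical data and param_grid; the valid
# grid keys depend on the keras model builder in keras_model_functionality,
# and the return structure of get_erupt_curves comes from
# get_erupts_curves_aupc):
#   uplift_model = MRUplift()
#   uplift_model.fit(x, y, t, param_grid=param_grid, n_jobs=1)
#   erupt_curves = uplift_model.get_erupt_curves()
#   best_t = uplift_model.predict_optimal_treatments(x_new, objective_weights=w)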
| 42.339478
| 155
| 0.64456
|
3b5f8ab06d6515dcac2ef2f1cf714ba7d9a63841
| 13,601
|
py
|
Python
|
nova/tests/unit/scheduler/test_weights.py
|
gabriel-samfira/nova
|
5ef07cc04dbf0216452ae358e57d9ddac51f1803
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/scheduler/test_weights.py
|
gabriel-samfira/nova
|
5ef07cc04dbf0216452ae358e57d9ddac51f1803
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/scheduler/test_weights.py
|
gabriel-samfira/nova
|
5ef07cc04dbf0216452ae358e57d9ddac51f1803
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For Scheduler weights.
"""
from oslo.serialization import jsonutils
from oslotest import mockpatch
from nova import context
from nova import exception
from nova.scheduler import weights
from nova import test
from nova.tests.unit import matchers
from nova.tests.unit.scheduler import fakes
class TestWeighedHost(test.NoDBTestCase):
def test_dict_conversion(self):
host_state = fakes.FakeHostState('somehost', None, {})
host = weights.WeighedHost(host_state, 'someweight')
expected = {'weight': 'someweight',
'host': 'somehost'}
self.assertThat(host.to_dict(), matchers.DictMatches(expected))
def test_all_weighers(self):
classes = weights.all_weighers()
class_names = [cls.__name__ for cls in classes]
self.assertIn('RAMWeigher', class_names)
self.assertIn('MetricsWeigher', class_names)
self.assertIn('IoOpsWeigher', class_names)
class RamWeigherTestCase(test.NoDBTestCase):
def setUp(self):
super(RamWeigherTestCase, self).setUp()
self.useFixture(mockpatch.Patch(
'nova.db.compute_node_get_all',
return_value=fakes.COMPUTE_NODES))
self.host_manager = fakes.FakeHostManager()
self.weight_handler = weights.HostWeightHandler()
self.weight_classes = self.weight_handler.get_matching_classes(
['nova.scheduler.weights.ram.RAMWeigher'])
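    # RAMWeigher normalizes each host's free_ram_mb to [0, 1] across the
    # candidate hosts and multiplies by CONF.ram_weight_multiplier, so the
    # host with the most free RAM scores 1.0 * multiplier (see the
    # expectations in the tests below).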
def _get_weighed_host(self, hosts, weight_properties=None):
if weight_properties is None:
weight_properties = {}
return self.weight_handler.get_weighed_objects(self.weight_classes,
hosts, weight_properties)[0]
def _get_all_hosts(self):
ctxt = context.get_admin_context()
return self.host_manager.get_all_host_states(ctxt)
def test_default_of_spreading_first(self):
hostinfo_list = self._get_all_hosts()
# host1: free_ram_mb=512
# host2: free_ram_mb=1024
# host3: free_ram_mb=3072
# host4: free_ram_mb=8192
# so, host4 should win:
weighed_host = self._get_weighed_host(hostinfo_list)
self.assertEqual(1.0, weighed_host.weight)
self.assertEqual('host4', weighed_host.obj.host)
def test_ram_filter_multiplier1(self):
self.flags(ram_weight_multiplier=0.0)
hostinfo_list = self._get_all_hosts()
# host1: free_ram_mb=512
# host2: free_ram_mb=1024
# host3: free_ram_mb=3072
# host4: free_ram_mb=8192
        # With a zero multiplier, all hosts have the same weight.
weighed_host = self._get_weighed_host(hostinfo_list)
self.assertEqual(0.0, weighed_host.weight)
def test_ram_filter_multiplier2(self):
self.flags(ram_weight_multiplier=2.0)
hostinfo_list = self._get_all_hosts()
# host1: free_ram_mb=512
# host2: free_ram_mb=1024
# host3: free_ram_mb=3072
# host4: free_ram_mb=8192
# so, host4 should win:
weighed_host = self._get_weighed_host(hostinfo_list)
self.assertEqual(1.0 * 2, weighed_host.weight)
self.assertEqual('host4', weighed_host.obj.host)
def test_ram_filter_negative(self):
self.flags(ram_weight_multiplier=1.0)
hostinfo_list = self._get_all_hosts()
host_attr = {'id': 100, 'memory_mb': 8192, 'free_ram_mb': -512}
host_state = fakes.FakeHostState('negative', 'negative', host_attr)
hostinfo_list = list(hostinfo_list) + [host_state]
# host1: free_ram_mb=512
# host2: free_ram_mb=1024
# host3: free_ram_mb=3072
# host4: free_ram_mb=8192
# negativehost: free_ram_mb=-512
# so, host4 should win
weights = self.weight_handler.get_weighed_objects(self.weight_classes,
hostinfo_list, {})
weighed_host = weights[0]
self.assertEqual(1, weighed_host.weight)
self.assertEqual('host4', weighed_host.obj.host)
# and negativehost should lose
weighed_host = weights[-1]
self.assertEqual(0, weighed_host.weight)
self.assertEqual('negative', weighed_host.obj.host)
class MetricsWeigherTestCase(test.NoDBTestCase):
def setUp(self):
super(MetricsWeigherTestCase, self).setUp()
self.useFixture(mockpatch.Patch(
'nova.db.compute_node_get_all',
return_value=fakes.COMPUTE_NODES_METRICS))
self.host_manager = fakes.FakeHostManager()
self.weight_handler = weights.HostWeightHandler()
self.weight_classes = self.weight_handler.get_matching_classes(
['nova.scheduler.weights.metrics.MetricsWeigher'])
def _get_weighed_host(self, hosts, setting, weight_properties=None):
if not weight_properties:
weight_properties = {}
self.flags(weight_setting=setting, group='metrics')
return self.weight_handler.get_weighed_objects(self.weight_classes,
hosts, weight_properties)[0]
def _get_all_hosts(self):
ctxt = context.get_admin_context()
return self.host_manager.get_all_host_states(ctxt)
def _do_test(self, settings, expected_weight, expected_host):
hostinfo_list = self._get_all_hosts()
weighed_host = self._get_weighed_host(hostinfo_list, settings)
self.assertEqual(expected_weight, weighed_host.weight)
self.assertEqual(expected_host, weighed_host.obj.host)
def test_single_resource(self):
# host1: foo=512
# host2: foo=1024
# host3: foo=3072
# host4: foo=8192
# so, host4 should win:
setting = ['foo=1']
self._do_test(setting, 1.0, 'host4')
def test_multiple_resource(self):
# host1: foo=512, bar=1
# host2: foo=1024, bar=2
# host3: foo=3072, bar=1
# host4: foo=8192, bar=0
# so, host2 should win:
setting = ['foo=0.0001', 'bar=1']
self._do_test(setting, 1.0, 'host2')
    def test_single_resource_negative_ratio(self):
# host1: foo=512
# host2: foo=1024
# host3: foo=3072
# host4: foo=8192
# so, host1 should win:
setting = ['foo=-1']
self._do_test(setting, 1.0, 'host1')
def test_multiple_resource_missing_ratio(self):
# host1: foo=512, bar=1
# host2: foo=1024, bar=2
# host3: foo=3072, bar=1
# host4: foo=8192, bar=0
# so, host4 should win:
setting = ['foo=0.0001', 'bar']
self._do_test(setting, 1.0, 'host4')
def test_multiple_resource_wrong_ratio(self):
# host1: foo=512, bar=1
# host2: foo=1024, bar=2
# host3: foo=3072, bar=1
# host4: foo=8192, bar=0
# so, host4 should win:
setting = ['foo=0.0001', 'bar = 2.0t']
self._do_test(setting, 1.0, 'host4')
def _check_parsing_result(self, weigher, setting, results):
self.flags(weight_setting=setting, group='metrics')
weigher._parse_setting()
self.assertEqual(len(weigher.setting), len(results))
for item in results:
self.assertIn(item, weigher.setting)
def test_parse_setting(self):
weigher = self.weight_classes[0]()
self._check_parsing_result(weigher,
['foo=1'],
[('foo', 1.0)])
self._check_parsing_result(weigher,
['foo=1', 'bar=-2.1'],
[('foo', 1.0), ('bar', -2.1)])
self._check_parsing_result(weigher,
['foo=a1', 'bar=-2.1'],
[('bar', -2.1)])
self._check_parsing_result(weigher,
['foo', 'bar=-2.1'],
[('bar', -2.1)])
self._check_parsing_result(weigher,
['=5', 'bar=-2.1'],
[('bar', -2.1)])
def test_metric_not_found_required(self):
setting = ['foo=1', 'zot=2']
self.assertRaises(exception.ComputeHostMetricNotFound,
self._do_test,
setting,
8192,
'host4')
def test_metric_not_found_non_required(self):
# host1: foo=512, bar=1
# host2: foo=1024, bar=2
# host3: foo=3072, bar=1
# host4: foo=8192, bar=0
# host5: foo=768, bar=0, zot=1
# host6: foo=2048, bar=0, zot=2
# so, host5 should win:
self.flags(required=False, group='metrics')
setting = ['foo=0.0001', 'zot=-1']
self._do_test(setting, 1.0, 'host5')
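# A self-contained, hypothetical sketch (not part of nova itself) of the
# "name=ratio" parsing convention the tests above exercise: entries without
# '=', with an empty name, or with a non-numeric ratio are silently dropped.
def _parse_weight_setting_sketch(setting):
    parsed = []
    for item in setting:
        name, sep, ratio = item.partition('=')
        name = name.strip()
        if not sep or not name:
            continue  # 'foo' and '=5' style entries are ignored
        try:
            parsed.append((name, float(ratio)))
        except ValueError:
            continue  # 'foo=a1' style entries are ignored
    return parsed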
COMPUTE_NODES_IO_OPS = [
# host1: num_io_ops=1
dict(id=1, local_gb=1024, memory_mb=1024, vcpus=1,
disk_available_least=None, free_ram_mb=512, vcpus_used=1,
free_disk_gb=512, local_gb_used=0, updated_at=None,
service=dict(host='host1', disabled=False),
hypervisor_hostname='node1', host_ip='127.0.0.1',
hypervisor_version=0, numa_topology=None,
stats=jsonutils.dumps({'io_workload': '1'})),
# host2: num_io_ops=2
dict(id=2, local_gb=2048, memory_mb=2048, vcpus=2,
disk_available_least=1024, free_ram_mb=1024, vcpus_used=2,
free_disk_gb=1024, local_gb_used=0, updated_at=None,
service=dict(host='host2', disabled=True),
hypervisor_hostname='node2', host_ip='127.0.0.1',
hypervisor_version=0, numa_topology=None,
stats=jsonutils.dumps({'io_workload': '2'})),
# host3: num_io_ops=0, so host3 should win in the case of default
# io_ops_weight_multiplier configure.
dict(id=3, local_gb=4096, memory_mb=4096, vcpus=4,
disk_available_least=3333, free_ram_mb=3072, vcpus_used=1,
free_disk_gb=3072, local_gb_used=0, updated_at=None,
service=dict(host='host3', disabled=False),
hypervisor_hostname='node3', host_ip='127.0.0.1',
hypervisor_version=0, numa_topology=None,
stats=jsonutils.dumps({'io_workload': '0'})),
# host4: num_io_ops=4, so host4 should win in the case of positive
# io_ops_weight_multiplier configure.
dict(id=4, local_gb=8192, memory_mb=8192, vcpus=8,
disk_available_least=8192, free_ram_mb=8192, vcpus_used=0,
free_disk_gb=8888, local_gb_used=0, updated_at=None,
service=dict(host='host4', disabled=False),
hypervisor_hostname='node4', host_ip='127.0.0.1',
hypervisor_version=0, numa_topology=None,
stats=jsonutils.dumps({'io_workload': '4'})),
# Broken entry
dict(id=5, local_gb=1024, memory_mb=1024, vcpus=1, service=None),
]
class IoOpsWeigherTestCase(test.NoDBTestCase):
def setUp(self):
super(IoOpsWeigherTestCase, self).setUp()
self.useFixture(mockpatch.Patch(
'nova.db.compute_node_get_all',
return_value=COMPUTE_NODES_IO_OPS))
self.host_manager = fakes.FakeHostManager()
self.weight_handler = weights.HostWeightHandler()
self.weight_classes = self.weight_handler.get_matching_classes(
['nova.scheduler.weights.io_ops.IoOpsWeigher'])
def _get_weighed_host(self, hosts, io_ops_weight_multiplier):
if io_ops_weight_multiplier is not None:
self.flags(io_ops_weight_multiplier=io_ops_weight_multiplier)
return self.weight_handler.get_weighed_objects(self.weight_classes,
hosts, {})[0]
def _get_all_hosts(self):
ctxt = context.get_admin_context()
return self.host_manager.get_all_host_states(ctxt)
def _do_test(self, io_ops_weight_multiplier, expected_weight,
expected_host):
hostinfo_list = self._get_all_hosts()
weighed_host = self._get_weighed_host(hostinfo_list,
io_ops_weight_multiplier)
self.assertEqual(weighed_host.weight, expected_weight)
if expected_host:
self.assertEqual(weighed_host.obj.host, expected_host)
def test_io_ops_weight_multiplier_by_default(self):
self._do_test(io_ops_weight_multiplier=None,
expected_weight=0.0,
expected_host='host3')
def test_io_ops_weight_multiplier_zero_value(self):
# We do not know the host, all have same weight.
self._do_test(io_ops_weight_multiplier=0.0,
expected_weight=0.0,
expected_host=None)
def test_io_ops_weight_multiplier_positive_value(self):
self._do_test(io_ops_weight_multiplier=2.0,
expected_weight=2.0,
expected_host='host4')
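# A hypothetical sketch (not part of nova itself) of the weighing arithmetic
# the IoOps assertions above are consistent with: io_ops counts are min-max
# normalised across hosts and scaled by io_ops_weight_multiplier, so a
# negative multiplier prefers hosts with fewer outstanding I/O operations.
def _io_ops_weight_sketch(io_ops_counts, multiplier=-1.0):
    lo, hi = min(io_ops_counts), max(io_ops_counts)
    span = (hi - lo) or 1.0
    return [multiplier * (c - lo) / span for c in io_ops_counts]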
| 40.120944
| 78
| 0.621793
|
220a12a51a49e8b0f770419de5da114152a1053a
| 32,556
|
py
|
Python
|
mne/decoding/transformer.py
|
joewalter/mne-python
|
b0629bea7f5e8e94d9e2e889f45a35f9657e6dbc
|
[
"BSD-3-Clause"
] | null | null | null |
mne/decoding/transformer.py
|
joewalter/mne-python
|
b0629bea7f5e8e94d9e2e889f45a35f9657e6dbc
|
[
"BSD-3-Clause"
] | null | null | null |
mne/decoding/transformer.py
|
joewalter/mne-python
|
b0629bea7f5e8e94d9e2e889f45a35f9657e6dbc
|
[
"BSD-3-Clause"
] | null | null | null |
# Authors: Mainak Jas <mainak@neuro.hut.fi>
# Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# Romain Trachel <trachelr@gmail.com>
#
# License: BSD (3-clause)
import numpy as np
from .mixin import TransformerMixin
from .base import BaseEstimator
from .. import pick_types
from ..filter import (low_pass_filter, high_pass_filter, band_pass_filter,
band_stop_filter, filter_data, _triage_filter_params)
from ..time_frequency.psd import _psd_multitaper
from ..externals import six
from ..utils import _check_type_picks, deprecated
class Scaler(TransformerMixin):
"""Standardizes data across channels
Parameters
----------
info : instance of Info
The measurement info
with_mean : boolean, True by default
If True, center the data before scaling.
with_std : boolean, True by default
If True, scale the data to unit variance (or equivalently,
unit standard deviation).
Attributes
----------
info : instance of Info
The measurement info
``ch_mean_`` : dict
The mean value for each channel type
``std_`` : dict
The standard deviation for each channel type
"""
def __init__(self, info, with_mean=True, with_std=True):
self.info = info
self.with_mean = with_mean
self.with_std = with_std
self.ch_mean_ = dict() # TODO rename attribute
self.std_ = dict() # TODO rename attribute
def fit(self, epochs_data, y):
"""Standardizes data across channels
Parameters
----------
epochs_data : array, shape (n_epochs, n_channels, n_times)
            The data used to estimate the scaling statistics.
y : array, shape (n_epochs,)
The label for each epoch.
Returns
-------
self : instance of Scaler
Returns the modified instance.
"""
if not isinstance(epochs_data, np.ndarray):
raise ValueError("epochs_data should be of type ndarray (got %s)."
% type(epochs_data))
X = np.atleast_3d(epochs_data)
picks_list = dict()
picks_list['mag'] = pick_types(self.info, meg='mag', ref_meg=False,
exclude='bads')
picks_list['grad'] = pick_types(self.info, meg='grad', ref_meg=False,
exclude='bads')
picks_list['eeg'] = pick_types(self.info, eeg=True, ref_meg=False,
meg=False, exclude='bads')
self.picks_list_ = picks_list
for key, this_pick in picks_list.items():
if self.with_mean:
ch_mean = X[:, this_pick, :].mean(axis=1)[:, None, :]
self.ch_mean_[key] = ch_mean # TODO rename attribute
if self.with_std:
                ch_std = X[:, this_pick, :].std(axis=1)[:, None, :]
self.std_[key] = ch_std # TODO rename attribute
return self
def transform(self, epochs_data, y=None):
"""Standardizes data across channels
Parameters
----------
epochs_data : array, shape (n_epochs, n_channels, n_times)
The data.
y : None | array, shape (n_epochs,)
The label for each epoch.
If None not used. Defaults to None.
Returns
-------
X : array, shape (n_epochs, n_channels, n_times)
            The standardized data.
"""
if not isinstance(epochs_data, np.ndarray):
raise ValueError("epochs_data should be of type ndarray (got %s)."
% type(epochs_data))
X = np.atleast_3d(epochs_data)
for key, this_pick in six.iteritems(self.picks_list_):
if self.with_mean:
X[:, this_pick, :] -= self.ch_mean_[key]
if self.with_std:
X[:, this_pick, :] /= self.std_[key]
return X
def inverse_transform(self, epochs_data, y=None):
""" Inverse standardization of data across channels
Parameters
----------
epochs_data : array, shape (n_epochs, n_channels, n_times)
The data.
y : None | array, shape (n_epochs,)
The label for each epoch.
If None not used. Defaults to None.
Returns
-------
X : array, shape (n_epochs, n_channels, n_times)
            The data restored to its original scale.
"""
if not isinstance(epochs_data, np.ndarray):
raise ValueError("epochs_data should be of type ndarray (got %s)."
% type(epochs_data))
X = np.atleast_3d(epochs_data)
for key, this_pick in six.iteritems(self.picks_list_):
if self.with_mean:
X[:, this_pick, :] += self.ch_mean_[key]
if self.with_std:
X[:, this_pick, :] *= self.std_[key]
return X
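# A hedged usage sketch for Scaler (illustrative only; assumes an existing
# ``epochs`` instance of mne.Epochs, which is not defined in this module).
def _scaler_usage_sketch(epochs):
    scaler = Scaler(epochs.info)
    X = epochs.get_data()  # shape (n_epochs, n_channels, n_times)
    y = epochs.events[:, -1]
    X_scaled = scaler.fit_transform(X, y)  # standardize per channel type
    X_restored = scaler.inverse_transform(X_scaled)  # undo the scaling
    return X_scaled, X_restored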
@deprecated("EpochsVectorizer will be deprecated in version 0.14; "
"use Vectorizer instead")
class EpochsVectorizer(TransformerMixin):
"""EpochsVectorizer transforms epoch data to fit into a scikit-learn pipeline.
Parameters
----------
info : instance of Info
The measurement info.
Attributes
----------
n_channels : int
The number of channels.
n_times : int
The number of time points.
"""
def __init__(self, info=None):
self.info = info
self.n_channels = None
self.n_times = None
def fit(self, epochs_data, y):
"""For each epoch, concatenate data from different channels into a single
feature vector.
Parameters
----------
epochs_data : array, shape (n_epochs, n_channels, n_times)
The data to concatenate channels.
y : array, shape (n_epochs,)
The label for each epoch.
Returns
-------
self : instance of EpochsVectorizer
returns the modified instance
"""
if not isinstance(epochs_data, np.ndarray):
raise ValueError("epochs_data should be of type ndarray (got %s)."
% type(epochs_data))
return self
def transform(self, epochs_data, y=None):
"""For each epoch, concatenate data from different channels into a single
feature vector.
Parameters
----------
epochs_data : array, shape (n_epochs, n_channels, n_times)
The data.
y : None | array, shape (n_epochs,)
The label for each epoch.
If None not used. Defaults to None.
Returns
-------
X : array, shape (n_epochs, n_channels * n_times)
The data concatenated over channels
"""
if not isinstance(epochs_data, np.ndarray):
raise ValueError("epochs_data should be of type ndarray (got %s)."
% type(epochs_data))
epochs_data = np.atleast_3d(epochs_data)
n_epochs, n_channels, n_times = epochs_data.shape
X = epochs_data.reshape(n_epochs, n_channels * n_times)
# save attributes for inverse_transform
self.n_epochs = n_epochs
self.n_channels = n_channels
self.n_times = n_times
return X
def inverse_transform(self, X, y=None):
"""For each epoch, reshape a feature vector into the original data shape
Parameters
----------
X : array, shape (n_epochs, n_channels * n_times)
The feature vector concatenated over channels
y : None | array, shape (n_epochs,)
The label for each epoch.
If None not used. Defaults to None.
Returns
-------
epochs_data : array, shape (n_epochs, n_channels, n_times)
The original data
"""
if not isinstance(X, np.ndarray):
raise ValueError("epochs_data should be of type ndarray (got %s)."
% type(X))
return X.reshape(-1, self.n_channels, self.n_times)
class Vectorizer(TransformerMixin):
"""Transforms n-dimensional array into 2D array of n_samples by n_features.
This class reshapes an n-dimensional array into an n_samples * n_features
array, usable by the estimators and transformers of scikit-learn.
Examples
--------
clf = make_pipeline(SpatialFilter(), _XdawnTransformer(), Vectorizer(),
LogisticRegression())
Attributes
----------
``features_shape_`` : tuple
Stores the original shape of data.
"""
def fit(self, X, y=None):
"""Stores the shape of the features of X.
Parameters
----------
X : array-like
The data to fit. Can be, for example a list, or an array of at
least 2d. The first dimension must be of length n_samples, where
samples are the independent samples used by the estimator
(e.g. n_epochs for epoched data).
y : None | array, shape (n_samples,)
Used for scikit-learn compatibility.
Returns
-------
self : Instance of Vectorizer
Return the modified instance.
"""
X = np.asarray(X)
self.features_shape_ = X.shape[1:]
return self
def transform(self, X):
"""Convert given array into two dimensions.
Parameters
----------
X : array-like
The data to fit. Can be, for example a list, or an array of at
least 2d. The first dimension must be of length n_samples, where
samples are the independent samples used by the estimator
(e.g. n_epochs for epoched data).
Returns
-------
X : array, shape (n_samples, n_features)
The transformed data.
"""
X = np.asarray(X)
if X.shape[1:] != self.features_shape_:
raise ValueError("Shape of X used in fit and transform must be "
"same")
return X.reshape(len(X), -1)
def fit_transform(self, X, y=None):
"""Fit the data, then transform in one step.
Parameters
----------
X : array-like
The data to fit. Can be, for example a list, or an array of at
least 2d. The first dimension must be of length n_samples, where
samples are the independent samples used by the estimator
(e.g. n_epochs for epoched data).
y : None | array, shape (n_samples,)
Used for scikit-learn compatibility.
Returns
-------
X : array, shape (n_samples, -1)
The transformed data.
"""
return self.fit(X).transform(X)
def inverse_transform(self, X):
"""Transform 2D data back to its original feature shape.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Data to be transformed back to original shape.
Returns
-------
X : array
The data transformed into shape as used in fit. The first
dimension is of length n_samples.
"""
X = np.asarray(X)
if X.ndim != 2:
raise ValueError("X should be of 2 dimensions but given has %s "
"dimension(s)" % X.ndim)
return X.reshape((len(X),) + self.features_shape_)
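# A hedged round-trip sketch for Vectorizer on random data (illustrative only).
def _vectorizer_usage_sketch():
    rng = np.random.RandomState(0)
    X = rng.randn(10, 5, 20)  # (n_samples, n_channels, n_times)
    vec = Vectorizer()
    X_2d = vec.fit_transform(X)  # flattened to shape (10, 100)
    X_back = vec.inverse_transform(X_2d)  # back to shape (10, 5, 20)
    return X_2d, X_back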
class PSDEstimator(TransformerMixin):
"""Compute power spectrum density (PSD) using a multi-taper method
Parameters
----------
sfreq : float
The sampling frequency.
fmin : float
The lower frequency of interest.
fmax : float
The upper frequency of interest.
bandwidth : float
The bandwidth of the multi taper windowing function in Hz.
adaptive : bool
Use adaptive weights to combine the tapered spectra into PSD
(slow, use n_jobs >> 1 to speed up computation).
low_bias : bool
Only use tapers with more than 90% spectral concentration within
bandwidth.
n_jobs : int
Number of parallel jobs to use (only used if adaptive=True).
normalization : str
Either "full" or "length" (default). If "full", the PSD will
be normalized by the sampling rate as well as the length of
the signal (as in nitime).
verbose : bool, str, int, or None
If not None, override default verbose level (see mne.verbose).
See Also
--------
psd_multitaper
"""
def __init__(self, sfreq=2 * np.pi, fmin=0, fmax=np.inf, bandwidth=None,
adaptive=False, low_bias=True, n_jobs=1,
normalization='length', verbose=None):
self.sfreq = sfreq
self.fmin = fmin
self.fmax = fmax
self.bandwidth = bandwidth
self.adaptive = adaptive
self.low_bias = low_bias
self.n_jobs = n_jobs
self.verbose = verbose
self.normalization = normalization
def fit(self, epochs_data, y):
"""Compute power spectrum density (PSD) using a multi-taper method
Parameters
----------
epochs_data : array, shape (n_epochs, n_channels, n_times)
The data.
y : array, shape (n_epochs,)
The label for each epoch
Returns
-------
self : instance of PSDEstimator
returns the modified instance
"""
if not isinstance(epochs_data, np.ndarray):
raise ValueError("epochs_data should be of type ndarray (got %s)."
% type(epochs_data))
return self
def transform(self, epochs_data, y=None):
"""Compute power spectrum density (PSD) using a multi-taper method
Parameters
----------
epochs_data : array, shape (n_epochs, n_channels, n_times)
The data
y : None | array, shape (n_epochs,)
The label for each epoch.
If None not used. Defaults to None.
Returns
-------
psd : array, shape (n_signals, len(freqs)) or (len(freqs),)
The computed PSD.
"""
if not isinstance(epochs_data, np.ndarray):
raise ValueError("epochs_data should be of type ndarray (got %s)."
% type(epochs_data))
psd, _ = _psd_multitaper(
epochs_data, sfreq=self.sfreq, fmin=self.fmin, fmax=self.fmax,
bandwidth=self.bandwidth, adaptive=self.adaptive,
low_bias=self.low_bias, normalization=self.normalization,
n_jobs=self.n_jobs)
return psd
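# A hedged usage sketch for PSDEstimator on random data (illustrative only;
# the sampling frequency and band edges are assumptions for the example).
def _psd_estimator_usage_sketch():
    rng = np.random.RandomState(0)
    X = rng.randn(4, 3, 256)  # (n_epochs, n_channels, n_times)
    psd = PSDEstimator(sfreq=128., fmin=1., fmax=40.)
    return psd.fit(X, y=None).transform(X)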
class FilterEstimator(TransformerMixin):
"""Estimator to filter RtEpochs
Applies a zero-phase low-pass, high-pass, band-pass, or band-stop
filter to the channels selected by "picks".
l_freq and h_freq are the frequencies below which and above which,
respectively, to filter out of the data. Thus the uses are:
- l_freq < h_freq: band-pass filter
- l_freq > h_freq: band-stop filter
- l_freq is not None, h_freq is None: low-pass filter
- l_freq is None, h_freq is not None: high-pass filter
If n_jobs > 1, more memory is required as "len(picks) * n_times"
additional time points need to be temporarily stored in memory.
Parameters
----------
info : instance of Info
Measurement info.
l_freq : float | None
Low cut-off frequency in Hz. If None the data are only low-passed.
h_freq : float | None
High cut-off frequency in Hz. If None the data are only
high-passed.
picks : array-like of int | None
Indices of channels to filter. If None only the data (MEG/EEG)
channels will be filtered.
filter_length : str (Default: '10s') | int | None
Length of the filter to use. If None or "len(x) < filter_length",
the filter length used is len(x). Otherwise, if int, overlap-add
filtering with a filter of the specified length in samples) is
used (faster for long signals). If str, a human-readable time in
units of "s" or "ms" (e.g., "10s" or "5500ms") will be converted
to the shortest power-of-two length at least that duration.
l_trans_bandwidth : float
Width of the transition band at the low cut-off frequency in Hz.
h_trans_bandwidth : float
Width of the transition band at the high cut-off frequency in Hz.
n_jobs : int | str
Number of jobs to run in parallel. Can be 'cuda' if scikits.cuda
is installed properly, CUDA is initialized, and method='fft'.
method : str
'fft' will use overlap-add FIR filtering, 'iir' will use IIR
forward-backward filtering (via filtfilt).
iir_params : dict | None
Dictionary of parameters to use for IIR filtering.
See mne.filter.construct_iir_filter for details. If iir_params
is None and method="iir", 4th order Butterworth will be used.
verbose : bool, str, int, or None
If not None, override default verbose level (see mne.verbose).
Defaults to self.verbose.
See Also
--------
TemporalFilter
"""
    def __init__(self, info, l_freq, h_freq, picks=None, filter_length='10s',
l_trans_bandwidth=None, h_trans_bandwidth=None, n_jobs=1,
method='fft', iir_params=None, verbose=None):
self.info = info
self.l_freq = l_freq
self.h_freq = h_freq
self.picks = _check_type_picks(picks)
self.filter_length = filter_length
self.l_trans_bandwidth = l_trans_bandwidth
self.h_trans_bandwidth = h_trans_bandwidth
self.n_jobs = n_jobs
self.method = method
self.iir_params = iir_params
def fit(self, epochs_data, y):
"""Filters data
Parameters
----------
epochs_data : array, shape (n_epochs, n_channels, n_times)
The data.
y : array, shape (n_epochs,)
The label for each epoch.
Returns
-------
self : instance of FilterEstimator
Returns the modified instance
"""
if not isinstance(epochs_data, np.ndarray):
raise ValueError("epochs_data should be of type ndarray (got %s)."
% type(epochs_data))
if self.picks is None:
self.picks = pick_types(self.info, meg=True, eeg=True,
ref_meg=False, exclude=[])
if self.l_freq == 0:
self.l_freq = None
if self.h_freq is not None and self.h_freq > (self.info['sfreq'] / 2.):
self.h_freq = None
if self.l_freq is not None and not isinstance(self.l_freq, float):
self.l_freq = float(self.l_freq)
if self.h_freq is not None and not isinstance(self.h_freq, float):
self.h_freq = float(self.h_freq)
if self.info['lowpass'] is None or (self.h_freq is not None and
(self.l_freq is None or
self.l_freq < self.h_freq) and
self.h_freq <
self.info['lowpass']):
self.info['lowpass'] = self.h_freq
if self.info['highpass'] is None or (self.l_freq is not None and
(self.h_freq is None or
self.l_freq < self.h_freq) and
self.l_freq >
self.info['highpass']):
self.info['highpass'] = self.l_freq
return self
def transform(self, epochs_data, y=None):
"""Filters data
Parameters
----------
epochs_data : array, shape (n_epochs, n_channels, n_times)
The data.
y : None | array, shape (n_epochs,)
The label for each epoch.
If None not used. Defaults to None.
Returns
-------
X : array, shape (n_epochs, n_channels, n_times)
The data after filtering
"""
if not isinstance(epochs_data, np.ndarray):
raise ValueError("epochs_data should be of type ndarray (got %s)."
% type(epochs_data))
epochs_data = np.atleast_3d(epochs_data)
if self.l_freq is None and self.h_freq is not None:
epochs_data = \
low_pass_filter(epochs_data, self.info['sfreq'], self.h_freq,
filter_length=self.filter_length,
trans_bandwidth=self.l_trans_bandwidth,
method=self.method, iir_params=self.iir_params,
picks=self.picks, n_jobs=self.n_jobs,
copy=False, verbose=False)
if self.l_freq is not None and self.h_freq is None:
epochs_data = \
high_pass_filter(epochs_data, self.info['sfreq'], self.l_freq,
filter_length=self.filter_length,
trans_bandwidth=self.h_trans_bandwidth,
method=self.method,
iir_params=self.iir_params,
picks=self.picks, n_jobs=self.n_jobs,
copy=False, verbose=False)
if self.l_freq is not None and self.h_freq is not None:
if self.l_freq < self.h_freq:
epochs_data = \
band_pass_filter(epochs_data, self.info['sfreq'],
self.l_freq, self.h_freq,
filter_length=self.filter_length,
l_trans_bandwidth=self.l_trans_bandwidth,
h_trans_bandwidth=self.h_trans_bandwidth,
method=self.method,
iir_params=self.iir_params,
picks=self.picks, n_jobs=self.n_jobs,
copy=False, verbose=False)
else:
epochs_data = \
band_stop_filter(epochs_data, self.info['sfreq'],
self.h_freq, self.l_freq,
filter_length=self.filter_length,
l_trans_bandwidth=self.h_trans_bandwidth,
h_trans_bandwidth=self.l_trans_bandwidth,
method=self.method,
iir_params=self.iir_params,
picks=self.picks, n_jobs=self.n_jobs,
copy=False, verbose=False)
return epochs_data
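# A hedged usage sketch for FilterEstimator (illustrative only; assumes
# ``info`` and ``epochs_data`` come from real measurements so that the picks
# and the sampling frequency are valid).
def _filter_estimator_usage_sketch(info, epochs_data):
    filt = FilterEstimator(info, l_freq=1., h_freq=30.)  # 1-30 Hz band-pass
    return filt.fit(epochs_data, y=None).transform(epochs_data)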
class UnsupervisedSpatialFilter(TransformerMixin, BaseEstimator):
"""Fit and transform with an unsupervised spatial filtering across time
and samples.
Parameters
----------
estimator : scikit-learn estimator
Estimator using some decomposition algorithm.
average : bool, defaults to False
If True, the estimator is fitted on the average across samples
(e.g. epochs).
"""
def __init__(self, estimator, average=False):
# XXX: Use _check_estimator #3381
for attr in ('fit', 'transform', 'fit_transform'):
if not hasattr(estimator, attr):
raise ValueError('estimator must be a scikit-learn '
'transformer, missing %s method' % attr)
if not isinstance(average, bool):
raise ValueError("average parameter must be of bool type, got "
"%s instead" % type(bool))
self.estimator = estimator
self.average = average
def fit(self, X, y=None):
"""Fit the spatial filters.
Parameters
----------
X : array, shape (n_epochs, n_channels, n_times)
The data to be filtered.
y : None | array, shape (n_samples,)
Used for scikit-learn compatibility.
Returns
-------
self : Instance of UnsupervisedSpatialFilter
Return the modified instance.
"""
if self.average:
X = np.mean(X, axis=0).T
else:
n_epochs, n_channels, n_times = X.shape
# trial as time samples
X = np.transpose(X, (1, 0, 2)).reshape((n_channels, n_epochs *
n_times)).T
self.estimator.fit(X)
return self
def fit_transform(self, X, y=None):
"""Transform the data to its filtered components after fitting.
Parameters
----------
X : array, shape (n_epochs, n_channels, n_times)
The data to be filtered.
y : None | array, shape (n_samples,)
Used for scikit-learn compatibility.
Returns
-------
X : array, shape (n_trials, n_channels, n_times)
The transformed data.
"""
return self.fit(X).transform(X)
def transform(self, X):
"""Transform the data to its spatial filters.
Parameters
----------
X : array, shape (n_epochs, n_channels, n_times)
The data to be filtered.
Returns
-------
X : array, shape (n_trials, n_channels, n_times)
The transformed data.
"""
n_epochs, n_channels, n_times = X.shape
# trial as time samples
X = np.transpose(X, [1, 0, 2]).reshape([n_channels, n_epochs *
n_times]).T
X = self.estimator.transform(X)
X = np.reshape(X.T, [-1, n_epochs, n_times]).transpose([1, 0, 2])
return X
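# A hedged usage sketch for UnsupervisedSpatialFilter with scikit-learn's PCA
# on random data (illustrative only).
def _unsupervised_spatial_filter_sketch():
    from sklearn.decomposition import PCA
    rng = np.random.RandomState(0)
    X = rng.randn(8, 6, 50)  # (n_epochs, n_channels, n_times)
    usf = UnsupervisedSpatialFilter(PCA(n_components=4), average=False)
    return usf.fit_transform(X)  # filtered data, shape (8, 4, 50)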
class TemporalFilter(TransformerMixin):
"""Estimator to filter data array along the last dimension.
Applies a zero-phase low-pass, high-pass, band-pass, or band-stop
filter to the channels.
l_freq and h_freq are the frequencies below which and above which,
respectively, to filter out of the data. Thus the uses are:
- l_freq < h_freq: band-pass filter
- l_freq > h_freq: band-stop filter
- l_freq is not None, h_freq is None: low-pass filter
- l_freq is None, h_freq is not None: high-pass filter
See ``mne.filter.filter_data``.
Parameters
----------
l_freq : float | None
Low cut-off frequency in Hz. If None the data are only low-passed.
h_freq : float | None
High cut-off frequency in Hz. If None the data are only
high-passed.
sfreq : float, defaults to 1.0
Sampling frequency in Hz.
filter_length : str | int, defaults to 'auto'
Length of the FIR filter to use (if applicable):
* int: specified length in samples.
* 'auto' (default in 0.14): the filter length is chosen based
on the size of the transition regions (7 times the reciprocal
of the shortest transition band).
* str: (default in 0.13 is "10s") a human-readable time in
units of "s" or "ms" (e.g., "10s" or "5500ms") will be
converted to that number of samples if ``phase="zero"``, or
the shortest power-of-two length at least that duration for
``phase="zero-double"``.
l_trans_bandwidth : float | str, defaults to 'auto'
Width of the transition band at the low cut-off frequency in Hz
(high pass or cutoff 1 in bandpass). Can be "auto"
(default in 0.14) to use a multiple of ``l_freq``::
min(max(l_freq * 0.25, 2), l_freq)
Only used for ``method='fir'``.
h_trans_bandwidth : float | str, defaults to 'auto'
Width of the transition band at the high cut-off frequency in Hz
(low pass or cutoff 2 in bandpass). Can be "auto"
(default in 0.14) to use a multiple of ``h_freq``::
min(max(h_freq * 0.25, 2.), info['sfreq'] / 2. - h_freq)
Only used for ``method='fir'``.
n_jobs : int | str, defaults to 1
Number of jobs to run in parallel. Can be 'cuda' if scikits.cuda
is installed properly, CUDA is initialized, and method='fft'.
method : str, defaults to 'fir'
'fir' will use overlap-add FIR filtering, 'iir' will use IIR
forward-backward filtering (via filtfilt).
iir_params : dict | None, defaults to None
Dictionary of parameters to use for IIR filtering.
See mne.filter.construct_iir_filter for details. If iir_params
is None and method="iir", 4th order Butterworth will be used.
fir_window : str, defaults to 'hamming'
The window to use in FIR design, can be "hamming", "hann",
or "blackman".
verbose : bool, str, int, or None, defaults to None
If not None, override default verbose level (see mne.verbose).
Defaults to self.verbose.
See Also
--------
FilterEstimator
Vectorizer
mne.filter.band_pass_filter
mne.filter.band_stop_filter
mne.filter.low_pass_filter
mne.filter.high_pass_filter
"""
def __init__(self, l_freq=None, h_freq=None, sfreq=1.0,
filter_length='auto', l_trans_bandwidth='auto',
h_trans_bandwidth='auto', n_jobs=1, method='fir',
iir_params=None, fir_window='hamming', verbose=None):
self.l_freq = l_freq
self.h_freq = h_freq
self.sfreq = sfreq
self.filter_length = filter_length
self.l_trans_bandwidth = l_trans_bandwidth
self.h_trans_bandwidth = h_trans_bandwidth
self.n_jobs = n_jobs
self.method = method
self.iir_params = iir_params
self.fir_window = fir_window
self.verbose = verbose
        if not isinstance(self.n_jobs, int) and self.n_jobs != 'cuda':
raise ValueError('n_jobs must be int or "cuda", got %s instead.'
% type(self.n_jobs))
def fit(self, X, y=None):
"""Does nothing. For scikit-learn compatibility purposes.
Parameters
----------
        X : array, shape (n_epochs, n_channels, n_times) or shape (n_channels, n_times) # noqa
The data to be filtered over the last dimension. The channels
dimension can be zero when passing a 2D array.
y : None
Not used, for scikit-learn compatibility issues.
Returns
-------
        self : instance of TemporalFilter
Returns the modified instance.
"""
return self
def transform(self, X):
"""Filters data along the last dimension.
Parameters
----------
X : array, shape (n_epochs, n_channels, n_times) or shape (n_channels, n_times) # noqa
The data to be filtered over the last dimension. The channels
dimension can be zero when passing a 2D array.
Returns
-------
X : array, shape is same as used in input.
The data after filtering.
"""
X = np.atleast_2d(X)
if X.ndim > 3:
raise ValueError("Array must be of at max 3 dimensions instead "
"got %s dimensional matrix" % (X.ndim))
shape = X.shape
X = X.reshape(-1, shape[-1])
(X, self.sfreq, self.l_freq, self.h_freq, self.l_trans_bandwidth,
self.h_trans_bandwidth, self.filter_length, _, self.fir_window) = \
_triage_filter_params(X, self.sfreq, self.l_freq, self.h_freq,
self.l_trans_bandwidth,
self.h_trans_bandwidth, self.filter_length,
self.method, phase='zero',
fir_window=self.fir_window)
X = filter_data(X, self.sfreq, self.l_freq, self.h_freq,
filter_length=self.filter_length,
l_trans_bandwidth=self.l_trans_bandwidth,
h_trans_bandwidth=self.h_trans_bandwidth,
n_jobs=self.n_jobs, method=self.method,
iir_params=self.iir_params, copy=False,
fir_window=self.fir_window,
verbose=self.verbose)
return X.reshape(shape)
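# A hedged usage sketch for TemporalFilter on random data (illustrative only;
# the sampling frequency and band edges are assumptions for the example).
def _temporal_filter_usage_sketch():
    rng = np.random.RandomState(0)
    X = rng.randn(2, 3, 2000)  # (n_epochs, n_channels, n_times)
    filt = TemporalFilter(l_freq=5., h_freq=40., sfreq=250.)
    return filt.fit(X).transform(X)  # 5-40 Hz band-pass along the last axis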
| 37.249428
| 97
| 0.566378
|
835ced0050372441aaebf4898848d03008a5c4d1
| 3,445
|
py
|
Python
|
machine_learning_course/lab03.py
|
wojdzi1607/MachineLearningCourse
|
0b883a00c51146863351861c48ed81e571e942c4
|
[
"MIT"
] | null | null | null |
machine_learning_course/lab03.py
|
wojdzi1607/MachineLearningCourse
|
0b883a00c51146863351861c48ed81e571e942c4
|
[
"MIT"
] | null | null | null |
machine_learning_course/lab03.py
|
wojdzi1607/MachineLearningCourse
|
0b883a00c51146863351861c48ed81e571e942c4
|
[
"MIT"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn import model_selection
from sklearn import metrics
from sklearn import preprocessing
from sklearn import svm
from sklearn import linear_model
from sklearn import tree
from sklearn import ensemble
from mlxtend.plotting import plot_decision_regions
def plot_iris(X: np.ndarray) -> None:
    # Visualize only the first two features so they can easily be shown in 2D.
plt.figure()
plt.scatter(X[:, 0], X[:, 1])
plt.axvline(x=0)
plt.axhline(y=0)
plt.title('Iris sepal features')
plt.xlabel('sepal length (cm)')
plt.ylabel('sepal width (cm)')
def TODO1():
iris = datasets.load_iris()
# iris = datasets.load_iris(as_frame=True)
# print(iris.frame.describe())
X, y = iris.data, iris.target
print(f'count y : {np.bincount(y)}')
X_train, X_test, y_train, y_test = model_selection.train_test_split(X, y, test_size=0.25, stratify=y)
    print(f'count y_train: {np.bincount(y_train)}')
print(f'count y_test : {np.bincount(y_test)}')
plot_iris(X)
min_max_scaler = preprocessing.MinMaxScaler()
min_max_scaler.fit(X_train)
X_min_max_scaler = min_max_scaler.transform(X)
scaler = preprocessing.StandardScaler()
scaler.fit(X_train)
X_standard_scaler = scaler.transform(X)
plot_iris(X_min_max_scaler)
plot_iris(X_standard_scaler)
plt.show()
def TODO2():
iris = datasets.load_iris()
X, y = iris.data, iris.target
X = X[:, [0, 1]]
X_train, X_test, y_train, y_test = model_selection.train_test_split(X, y, test_size=0.25, stratify=y, random_state=42)
min_max_scaler = preprocessing.MinMaxScaler()
min_max_scaler.fit(X_train)
X_train = min_max_scaler.transform(X_train)
X_test = min_max_scaler.transform(X_test)
# classificators
clf_svm = svm.SVC(random_state=42)
clf_svm.fit(X_train, y_train)
acc_clf_svm = metrics.accuracy_score(y_test, clf_svm.predict(X_test))
print(f'acc_clf_svm: {acc_clf_svm}')
clf_linear = linear_model.LogisticRegression(random_state=42)
clf_linear.fit(X_train, y_train)
acc_clf_linear = metrics.accuracy_score(y_test, clf_linear.predict(X_test))
print(f'acc_clf_linear: {acc_clf_linear}')
clf_tree = tree.DecisionTreeClassifier(random_state=42)
clf_tree.fit(X_train, y_train)
acc_clf_tree = metrics.accuracy_score(y_test, clf_tree.predict(X_test))
print(f'acc_clf_tree: {acc_clf_tree}')
clf_rf = ensemble.RandomForestClassifier(random_state=42)
clf_rf.fit(X_train, y_train)
acc_clf_rf = metrics.accuracy_score(y_test, clf_rf.predict(X_test))
print(f'acc_clf_rf: {acc_clf_rf}')
# plt.figure()
# plot_decision_regions(X_test, y_test, clf_svm, legend=2)
# plt.figure()
# plot_decision_regions(X_test, y_test, clf_linear, legend=2)
# plt.figure()
# plot_decision_regions(X_test, y_test, clf_tree, legend=2)
# plt.figure()
# plot_decision_regions(X_test, y_test, clf_rf, legend=2)
#
# plt.show()
param_grid = [
{'C': [1, 10, 100, 1000], 'kernel': ['linear']},
{'C': [1, 10, 100, 1000], 'gamma': [0.001, 0.0001], 'kernel': ['rbf']},
]
clf_gs = model_selection.GridSearchCV(estimator=svm.SVC(), param_grid=param_grid, n_jobs=4)
clf_gs.fit(X_train, y_train)
print(clf_gs.cv_results_)
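    # Hedged follow-up: the fitted GridSearchCV also exposes the winning
    # configuration directly, which is easier to read than the full
    # cv_results_ dict.
    print(f'best params: {clf_gs.best_params_}')
    print(f'best CV accuracy: {clf_gs.best_score_:.3f}')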
def main():
TODO2()
if __name__ == '__main__':
main()
| 30.219298
| 122
| 0.701597
|