Dataset schema (HuggingFace dataset-viewer export; one row per source file, ⌀ marks a nullable column):

| column | type | values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 4 – 1.02M |
| ext | string | 8 classes |
| lang | string | 1 value |
| max_stars_repo_path | string | length 4 – 209 |
| max_stars_repo_name | string | length 5 – 121 |
| max_stars_repo_head_hexsha | string | length 40 |
| max_stars_repo_licenses | list | length 1 – 10 |
| max_stars_count | int64 | 1 – 191k ⌀ |
| max_stars_repo_stars_event_min_datetime | string | length 24 ⌀ |
| max_stars_repo_stars_event_max_datetime | string | length 24 ⌀ |
| max_issues_repo_path | string | length 4 – 209 |
| max_issues_repo_name | string | length 5 – 121 |
| max_issues_repo_head_hexsha | string | length 40 |
| max_issues_repo_licenses | list | length 1 – 10 |
| max_issues_count | int64 | 1 – 67k ⌀ |
| max_issues_repo_issues_event_min_datetime | string | length 24 ⌀ |
| max_issues_repo_issues_event_max_datetime | string | length 24 ⌀ |
| max_forks_repo_path | string | length 4 – 209 |
| max_forks_repo_name | string | length 5 – 121 |
| max_forks_repo_head_hexsha | string | length 40 |
| max_forks_repo_licenses | list | length 1 – 10 |
| max_forks_count | int64 | 1 – 105k ⌀ |
| max_forks_repo_forks_event_min_datetime | string | length 24 ⌀ |
| max_forks_repo_forks_event_max_datetime | string | length 24 ⌀ |
| content | string | length 4 – 1.02M |
| avg_line_length | float64 | 1.07 – 66.1k |
| max_line_length | int64 | 4 – 266k |
| alphanum_fraction | float64 | 0.01 – 1 |
hexsha: df75c7f45e2493a860ede35367ed9150664866bf | size: 2,896 | ext: py | lang: Python
path: homeassistant/components/alarm_control_panel/reproduce_state.py
repo (stars/issues/forks): KJonline/core @ a1eaff43436e9769109906ca892359de946ac657 | licenses: ["Apache-2.0"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
"""Reproduce an Alarm control panel state."""
from __future__ import annotations
import asyncio
import logging
from typing import Any, Iterable
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_ALARM_ARM_AWAY,
SERVICE_ALARM_ARM_CUSTOM_BYPASS,
SERVICE_ALARM_ARM_HOME,
SERVICE_ALARM_ARM_NIGHT,
SERVICE_ALARM_ARM_VACATION,
SERVICE_ALARM_DISARM,
SERVICE_ALARM_TRIGGER,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_CUSTOM_BYPASS,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_ARMED_VACATION,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
)
from homeassistant.core import Context, HomeAssistant, State
from . import DOMAIN
_LOGGER = logging.getLogger(__name__)
VALID_STATES = {
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_CUSTOM_BYPASS,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_ARMED_VACATION,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
}
async def _async_reproduce_state(
hass: HomeAssistant,
state: State,
*,
context: Context | None = None,
reproduce_options: dict[str, Any] | None = None,
) -> None:
"""Reproduce a single state."""
cur_state = hass.states.get(state.entity_id)
if cur_state is None:
_LOGGER.warning("Unable to find entity %s", state.entity_id)
return
if state.state not in VALID_STATES:
_LOGGER.warning(
"Invalid state specified for %s: %s", state.entity_id, state.state
)
return
# Return if we are already at the right state.
if cur_state.state == state.state:
return
service_data = {ATTR_ENTITY_ID: state.entity_id}
if state.state == STATE_ALARM_ARMED_AWAY:
service = SERVICE_ALARM_ARM_AWAY
elif state.state == STATE_ALARM_ARMED_CUSTOM_BYPASS:
service = SERVICE_ALARM_ARM_CUSTOM_BYPASS
elif state.state == STATE_ALARM_ARMED_HOME:
service = SERVICE_ALARM_ARM_HOME
elif state.state == STATE_ALARM_ARMED_NIGHT:
service = SERVICE_ALARM_ARM_NIGHT
elif state.state == STATE_ALARM_ARMED_VACATION:
service = SERVICE_ALARM_ARM_VACATION
elif state.state == STATE_ALARM_DISARMED:
service = SERVICE_ALARM_DISARM
elif state.state == STATE_ALARM_TRIGGERED:
service = SERVICE_ALARM_TRIGGER
await hass.services.async_call(
DOMAIN, service, service_data, context=context, blocking=True
)
async def async_reproduce_states(
hass: HomeAssistant,
states: Iterable[State],
*,
context: Context | None = None,
reproduce_options: dict[str, Any] | None = None,
) -> None:
"""Reproduce Alarm control panel states."""
await asyncio.gather(
*(
_async_reproduce_state(
hass, state, context=context, reproduce_options=reproduce_options
)
for state in states
)
)
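# A minimal usage sketch (an illustrative addition, not part of the original
# module): given a running HomeAssistant instance with a configured alarm
# panel, reproducing a State issues the matching service call. The entity id
# below is a hypothetical example.
async def arm_away(hass: HomeAssistant) -> None:
    # alarm_control_panel.alarm_arm_away is called unless the entity is
    # already in armed_away (see the early return in _async_reproduce_state).
    await async_reproduce_states(
        hass, [State("alarm_control_panel.home", "armed_away")]
    )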
| avg_line_length: 27.846154 | max_line_length: 81 | alphanum_fraction: 0.704765 |
hexsha: 2875e7e3385b8736891e0cc95f01c96f6c22c396 | size: 1,642 | ext: py | lang: Python
path: data/p2DJ/New/program/cirq/startCirq84.py
repo (stars/issues/forks): UCLA-SEAL/QDiff @ d968cbc47fe926b7f88b4adf10490f1edd6f8819 | licenses: ["BSD-3-Clause"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=2
# total number=7
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
c = cirq.Circuit() # circuit begin
c.append(cirq.H.on(input_qubit[0])) # number=1
c.append(cirq.Y.on(input_qubit[1])) # number=2
c.append(cirq.Y.on(input_qubit[1])) # number=4
c.append(cirq.Y.on(input_qubit[1])) # number=3
c.append(cirq.SWAP.on(input_qubit[1],input_qubit[0])) # number=5
c.append(cirq.SWAP.on(input_qubit[1],input_qubit[0])) # number=6
# circuit end
c.append(cirq.measure(*input_qubit, key='result'))
return c
def bitstring(bits):
return ''.join(str(int(b)) for b in bits)
if __name__ == '__main__':
qubit_count = 4
input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
circuit = make_circuit(qubit_count,input_qubits)
circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')
circuit_sample_count =2000
simulator = cirq.Simulator()
result = simulator.run(circuit, repetitions=circuit_sample_count)
frequencies = result.histogram(key='result', fold_func=bitstring)
writefile = open("../data/startCirq84.csv","w+")
print(format(frequencies),file=writefile)
print("results end", file=writefile)
    print(len(circuit), file=writefile)
print(circuit,file=writefile)
writefile.close()
| avg_line_length: 26.918033 | max_line_length: 77 | alphanum_fraction: 0.696102 |
hexsha: ba2461b9a24085ead6709d32ae139a5295c476ad | size: 166 | ext: py | lang: Python
path: guardiancl/device.py
repo (stars/issues/forks): guardiaocl/guardiaocl-servers @ 86c203656c2e9d1844a17963a4e5c2239b4eda1a | licenses: ["Apache-2.0"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
class Device:
serial = ""
description = ""
def __init__(self, serial, description):
self.serial = serial
self.description = description
| avg_line_length: 18.444444 | max_line_length: 44 | alphanum_fraction: 0.614458 |
hexsha: 2bedeb4aaf31855781910b52987ca36e72b55bd2 | size: 1,572 | ext: py | lang: Python
path: src/library/blas/AutoGemm/AutoGemm.py
repo (stars/issues/forks): tingxingdong/clean @ 9cdaed4c755b825b0c10a99f9974224993aa39a9 | licenses: ["Apache-2.0"]
max_stars_count: 1 (2021-07-07T11:28:56.000Z – 2021-07-07T11:28:56.000Z) | max_issues_count: null | max_forks_count: null
################################################################################
# AutoGemm
# - Automatically generate gemm kernels based on tile parameters
# - This script generates the following to ease integration into clBLAS:
# - generate all the kernel files
# - kernel selection logic
# - include files for kernel strings
#
# TODO Now
# - offline compilation
# TODO Future
# - fuse together unroll=8 and unroll=1 in same kernel ?
# functionally works fine, but lowers performance by ~10%
################################################################################
import os
import sys
import argparse
import getopt
import Common
import Includes
import KernelSelection
import KernelOpenCL
################################################################################
# Main
################################################################################
if __name__ == "__main__":
# parse arguments
ap = argparse.ArgumentParser(description="AutoGemm")
ap.add_argument("--output-path", dest="output" )
ap.add_argument("--opencl-compiler-version", dest="clCompilerVersion", action="store", choices=["1.1", "1.2", "2.0" ])
args = ap.parse_args()
if args.output:
Common.setOutputPath(args.output)
else:
print "AutoGemm.py: Warning: No output path specified; default is working directory."
print "AutoGemm.py: using OpenCL " + args.clCompilerVersion + " compiler"
Common.setClCompilerVersion(args.clCompilerVersion)
KernelOpenCL.writeOpenCLKernels()
KernelSelection.writeKernelSelection()
Includes.writeIncludes()
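# A hypothetical invocation (the output path is an assumption; the script is
# Python 2, as its print statements require):
#
#   python2 AutoGemm.py --output-path ./generated --opencl-compiler-version 1.2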
| avg_line_length: 32.75 | max_line_length: 120 | alphanum_fraction: 0.592239 |
hexsha: 8fbf7dfbfe2a43bef8255eb9ed7763fc1ea34514 | size: 26 | ext: py | lang: Python
path: packages/simcore-sdk/src/simcore_sdk/node_data/__init__.py
repo (stars/issues): KZzizzle/osparc-simcore @ 981bc8d193f3f5d507e3225f857e0308c339e163 | licenses: ["MIT"]
repo (forks): GitHK/osparc-simcore-forked @ 5b01a28d1b8028afcf9a735e1d46a73daa13686e | licenses: ["MIT"]
max_stars_count: null | max_issues_count: 17 (2020-10-15T16:06:05.000Z – 2022-03-21T18:48:21.000Z) | max_forks_count: null
from . import data_manager
| avg_line_length: 26 | max_line_length: 26 | alphanum_fraction: 0.846154 |
hexsha: f69c20ae3b98dd5477e055580f3d2133d7906e57 | size: 952 | ext: py | lang: Python
path: test/testserver.py
repo (stars/issues/forks): pymacaron/pymacaron @ 884b68722bdd2ab90ad645848f300a73420ada1d | licenses: ["BSD-2-Clause"]
max_stars_count: 27 (2018-08-08T09:40:47.000Z – 2022-03-11T15:55:48.000Z) | max_issues_count: 9 (2019-07-23T08:05:06.000Z – 2019-12-16T12:07:32.000Z) | max_forks_count: 1 (2020-06-16T13:47:04.000Z – 2020-06-16T13:47:04.000Z)
#!/usr/bin/env python
import os
import sys
from pymacaron.log import pymlogger
import json
from flask import Flask
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'))
from pymacaron import API, letsgo
log = pymlogger(__name__)
app = Flask(__name__)
def test_crash_reporter(msg, body):
tmpdir = '/tmp/test-pym-microservice'
try:
os.stat(tmpdir)
except Exception:
os.mkdir(tmpdir)
data = {
'title': msg,
'body': json.loads(body),
}
log.info("Storing crash report into %s/error_report.json" % tmpdir)
with open(os.path.join(tmpdir, "error_report.json"), "a+") as f:
f.write(json.dumps(data))
def start(port, debug):
api = API(
app,
port=8765,
debug=False,
error_reporter=test_crash_reporter,
)
api.load_apis('.', include_crash_api=True)
api.start(serve="crash")
letsgo(__name__, callback=start)
| avg_line_length: 19.04 | max_line_length: 79 | alphanum_fraction: 0.643908 |
hexsha: db4b0c144c5ef281beeeac024c50cc5c9a833d13 | size: 4,930 | ext: py | lang: Python
path: crabageprediction/venv/Lib/site-packages/mpl_toolkits/axisartist/axisline_style.py
repo (stars/issues/forks): 13rianlucero/CrabAgePrediction @ 92bc7fbe1040f49e820473e33cc3902a5a7177c7 | licenses: ["MIT"]
max_stars_count: 7 (2022-01-16T12:28:16.000Z – 2022-03-04T15:31:45.000Z) | max_issues_count: 5 (2022-01-16T10:08:41.000Z – 2022-01-20T05:34:09.000Z) | max_forks_count: 4 (2022-02-04T22:58:27.000Z – 2022-02-14T19:29:18.000Z)
import math
import numpy as np
from matplotlib.patches import _Style, FancyArrowPatch
from matplotlib.transforms import IdentityTransform
from matplotlib.path import Path
class _FancyAxislineStyle:
class SimpleArrow(FancyArrowPatch):
"""The artist class that will be returned for SimpleArrow style."""
_ARROW_STYLE = "->"
def __init__(self, axis_artist, line_path, transform,
line_mutation_scale):
self._axis_artist = axis_artist
self._line_transform = transform
self._line_path = line_path
self._line_mutation_scale = line_mutation_scale
FancyArrowPatch.__init__(self,
path=self._line_path,
arrowstyle=self._ARROW_STYLE,
patchA=None,
patchB=None,
shrinkA=0.,
shrinkB=0.,
mutation_scale=line_mutation_scale,
mutation_aspect=None,
transform=IdentityTransform(),
)
def set_line_mutation_scale(self, scale):
self.set_mutation_scale(scale*self._line_mutation_scale)
def _extend_path(self, path, mutation_size=10):
"""
            Extend the path to make room for drawing the arrow.
"""
(x0, y0), (x1, y1) = path.vertices[-2:]
theta = math.atan2(y1 - y0, x1 - x0)
x2 = x1 + math.cos(theta) * mutation_size
y2 = y1 + math.sin(theta) * mutation_size
if path.codes is None:
return Path(np.concatenate([path.vertices, [[x2, y2]]]))
else:
return Path(np.concatenate([path.vertices, [[x2, y2]]]),
np.concatenate([path.codes, [Path.LINETO]]))
def set_path(self, path):
self._line_path = path
def draw(self, renderer):
"""
Draw the axis line.
            1) transform the path to display coordinates.
            2) extend the path to make room for the arrow.
            3) update the path of the FancyArrowPatch.
            4) draw.
"""
path_in_disp = self._line_transform.transform_path(self._line_path)
mutation_size = self.get_mutation_scale() # line_mutation_scale()
extended_path = self._extend_path(path_in_disp,
mutation_size=mutation_size)
self._path_original = extended_path
FancyArrowPatch.draw(self, renderer)
class FilledArrow(SimpleArrow):
"""The artist class that will be returned for SimpleArrow style."""
_ARROW_STYLE = "-|>"
class AxislineStyle(_Style):
"""
A container class which defines style classes for AxisArtists.
    An instance of any axisline style class is a callable object,
whose call signature is ::
__call__(self, axis_artist, path, transform)
When called, this should return an `.Artist` with the following methods::
def set_path(self, path):
# set the path for axisline.
def set_line_mutation_scale(self, scale):
# set the scale
def draw(self, renderer):
# draw
"""
_style_list = {}
class _Base:
# The derived classes are required to be able to be initialized
# w/o arguments, i.e., all its argument (except self) must have
# the default values.
def __init__(self):
"""
initialization.
"""
super().__init__()
def __call__(self, axis_artist, transform):
"""
Given the AxisArtist instance, and transform for the path (set_path
method), return the Matplotlib artist for drawing the axis line.
"""
return self.new_line(axis_artist, transform)
class SimpleArrow(_Base):
"""
A simple arrow.
"""
ArrowAxisClass = _FancyAxislineStyle.SimpleArrow
def __init__(self, size=1):
"""
Parameters
----------
size : float
Size of the arrow as a fraction of the ticklabel size.
"""
self.size = size
super().__init__()
def new_line(self, axis_artist, transform):
linepath = Path([(0, 0), (0, 1)])
axisline = self.ArrowAxisClass(axis_artist, linepath, transform,
line_mutation_scale=self.size)
return axisline
_style_list["->"] = SimpleArrow
class FilledArrow(SimpleArrow):
ArrowAxisClass = _FancyAxislineStyle.FilledArrow
_style_list["-|>"] = FilledArrow
| avg_line_length: 33.767123 | max_line_length: 79 | alphanum_fraction: 0.546653 |
hexsha: 0d1f787b4873449cef1d90a3cbe90464999f4ad5 | size: 37 | ext: py | lang: Python
path: src/lib/rexec.py
repo (stars): DTenore/skulpt @ 098d20acfb088d6db85535132c324b7ac2f2d212 | licenses: ["MIT"]
repo (issues/forks): wakeupmuyunhe/skulpt @ a8fb11a80fb6d7c016bab5dfe3712517a350b347 | licenses: ["MIT"]
max_stars_count: 2,671 (2015-01-03T08:23:25.000Z – 2022-03-31T06:15:48.000Z) | max_issues_count: 972 (2015-01-05T08:11:00.000Z – 2022-03-29T13:47:15.000Z) | max_forks_count: 845 (2015-01-03T19:53:36.000Z – 2022-03-29T18:34:22.000Z)
import _sk_fail; _sk_fail._("rexec")
| avg_line_length: 18.5 | max_line_length: 36 | alphanum_fraction: 0.756757 |
hexsha: 6b81f33302f528c1b23be0003b7a3e9714f28d72 | size: 1,356 | ext: py | lang: Python
path: Phase2/Backup_Files/create.py
repo (stars/issues/forks): skaefer143/291-Project2 @ 08a6751c7742722af3db7e091770df79cf94d538 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: 1 (2019-09-17T00:21:22.000Z – 2019-09-17T00:21:22.000Z)
# CMPUT 291 - Mini Project 2
# Group 13 - Ken Li, Noah Kryzanowski, Storm Kaefer
# Phase 2 - Create index files
# Last Change By: Storm
# Time Changed:
# ----
# With help from https://www.cyberciti.biz/faq/python-execute-unix-linux-command-examples/
# ----
# Filenames must be indexedTweets.txt, indexedTerms.txt, and indexedDates.txt for program to work
# ----
import subprocess
def dateDB():
subprocess.call(["db_load", "-c", "dupsort=1", "-f", dateFilename, "-T", "-t", "btree", "da.idx"]) # -h for sorting by the id key at start, -u for unique entries
# Outputs db_load command to da.idx
# The -c dupsort=1 argument makes sure that duplicates are in the database and sorted
return
def termsDB():
subprocess.call(["db_load", "-c", "dupsort=1", "-f", termsFilename, "-T", "-t", "btree", "te.idx"]) # -h for sorting by the id key at start, -u for unique entries
# Outputs db_load command to te.idx
# The -c dupsort=1 argument makes sure that duplicates are in the database and sorted
return
def tweetDB():
subprocess.call(["db_load", "-f", tweetsFilename, "-T", "-t", "hash", "tw.idx"])
# Outputs db_load command to tw.idx
return
tweetsFilename = "indexedTweets.txt"
termsFilename = "indexedTerms.txt"
dateFilename = "indexedDates.txt"
tweetDB()
termsDB()
dateDB()
print("--- Database Created. (This message will always display)")
exit()
| avg_line_length: 33.9 | max_line_length: 163 | alphanum_fraction: 0.699115 |
hexsha: b1ff6f092fdf28a86f168d9c6f8b262b24bda329 | size: 7,672 | ext: py | lang: Python
path: QGL/BasicSequences/StarkShift.py
repo (stars/issues/forks): gribeill/QGL @ ce96caac94b9a5ee621a9bb9c56a14bf23c750e5 | licenses: ["Apache-2.0"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
from ..PulsePrimitives import *
from ..Compiler import compile_to_hardware
from ..ChannelLibraries import EdgeFactory
from ..PulseSequencePlotter import plot_pulse_files
from .helpers import create_cal_seqs, delay_descriptor, cal_descriptor
import numpy as np
from collections.abc import Iterable
from itertools import product
def StarkSpectroscopy(qubit, measurement, amplitude,
delay=200e-9, length=1e-6, showPlot=False):
"""
Stark shift spectroscopy experiment. Applies a coherent displacement
to the qubit readout cavity while doing pulsed spectroscopy.
Parameters
----------
qubit : Channels.LogicalChannel
Logical channel for the control qubit
measurement : bbndb.qgl.Measurement
Measurement channel to apply displacement pulse to
    amplitude : float or iterable of floats
        Amplitude(s) of the measurement pulse. Valid range: [0.0, 1.0].
delay : float, optional
Delay between end of spectroscopy pulse and start of MEAS (seconds)
    length : int/float, optional
Total length of cavity displacement pulse (seconds). 4 ns minimum.
showPlot : boolean, optional
Whether to plot
Returns
-------
metafile : string
Path to a json metafile with details about the sequences and paths
to compiled machine files.
Examples
--------
>>> mf = StarkSpectroscopy(q1, q1.measure_chan, np.linspace(0.6, 0.8, 51));
Compiled 51 sequences.
>>> mf
'/path/to/exp/exp-meta.json'
"""
if not isinstance(amplitude, Iterable):
amplitude = [amplitude]
def stark_shift_pulse(amp):
pump_pulse = Utheta(measurement, amp=amp, length=length)
l1 = length - delay - qubit.pulse_params["length"] - delay
spec_pulse = Id(qubit, length=l1)+X(qubit)+Id(qubit,length=delay)
return spec_pulse*pump_pulse
seqs = [[stark_shift_pulse(a), MEAS(qubit)] for a in amplitude]
axis_descriptor = [{
'name': 'Stark Shift Amplitude',
'unit': None,
'points': list(amplitude),
'partition': 1
}]
metafile = compile_to_hardware(seqs, 'StarkSpec/StarkSpec', axis_descriptor=axis_descriptor)
if showPlot:
plot_pulse_files(metafile)
return metafile
def StarkEcho(qubit, measurement, amplitudes, delays,
wait=200e-9, periods=4, showPlot=False):
"""
Hahn echo sequence with a coherent displacement of the qubit measurement
cavity. Used to measure photon-induced dephasing. This sequence can cause
a lot of cache pressure so number of points may be limited.
TODO: Use QGL intrinsics to reduce sequence and memory cache utilization.
Parameters
----------
qubit : Channels.LogicalChannel
Logical channel for the Hahn echo
measurement : bbndb.qgl.Measurement
Measurement channel of the qubit
    amplitudes : int/float iterable
        Amplitude(s) of cavity displacement pulse. Valid range: [0.0, 1.0].
    delays : int/float iterable
        Hahn echo delays - the t in 90-t-180-t-90 (seconds).
    wait : int/float, optional
        Delay between end of echo sequence and start of MEAS
        (seconds). 4 ns minimum.
periods : int, optional
Number of artificial oscillations
showPlot : boolean, optional
Whether to plot
Returns
-------
metafile : string
Path to a json metafile with details about the sequences and paths
to compiled machine files.
Examples
--------
>>> mf = StarkEcho(q1, q1.measure_chan,
np.linspace(0.6, 0.8, 10),
np.linspace(20.0e-9, 200.02e-6, 10));
Compiled 210 sequences.
>>> mf
'/path/to/exp/exp-meta.json'
"""
if not isinstance(amplitudes, Iterable):
amplitudes = [amplitudes]
if not isinstance(delays, Iterable):
delays = [delays]
def echo_phase(n):
return 2*np.pi*periods/len(delays)*n
def echo_stark(n, amp, max_delay, meas_delay=200e-9):
x_len = qubit.pulse_params["length"]
max_len = 3*x_len + 2*max_delay + meas_delay
echo_wait = max_len - (3*x_len + 2*delays[n])
echo_seq = Id(qubit, echo_wait) + X90(qubit) + Id(qubit, delays[n]) + \
Y(qubit) + Id(qubit, delays[n]) + U90(qubit, echo_phase(n))
meas_seq = Utheta(measurement, amp=amp, length=max_len)
return echo_seq*meas_seq
seqs = [[echo_stark(n, amp, np.max(delays)), Id(measurement, length=wait), MEAS(qubit)]
for n, amp in product(range(len(delays)), amplitudes)]
axis_descriptor = [delay_descriptor(delays)] * len(amplitudes)
metafile = compile_to_hardware(seqs, 'StarkEcho/StarkEcho', axis_descriptor=axis_descriptor)
if showPlot:
plot_pulse_files(metafile)
return metafile
def CavityPumpProbe(qubit, measurement, offsets, amplitude,
length=1e-6, wait=2e-6, showPlot=False):
"""
Time resolved cavity spectroscopy. Applies a coherent displacement to qubit
readout cavity while sweeping qubit spectroscopy pulse delay. Useful to
measure cavity kappa and cavity population.
Parameters
----------
qubit : Channels.LogicalChannel
Logical channel for the Hahn echo
measurement : bbndb.qgl.Measurement
Measurement channel of the qubit
offsets : int/float iterable
Spectroscopy pulse offset relative to start of cavity displacement
pulse (seconds)
amplitude : int/float iterable
Amplitude(s) of cavity displacement pulse. Valid range: [0.0, 1.0].
length : int/float, optional
Total length of cavity displacement pulse (seconds)
wait : int/float, optional
Delay between end of cavity displacement pulse and start of measurement
(seconds). 4 ns minimum.
showPlot : boolean, optional
Whether to plot
Returns
-------
metafile : string
Path to a json metafile with details about the sequences and paths
to compiled machine files.
Examples
--------
>>> mf = CavityPumpProbe(q1, q1.measure_chan,
np.linspace(20.0e-9, 200.02e-6, 10),
0.6);
Compiled 210 sequences.
>>> mf
'/path/to/exp/exp-meta.json'
"""
if not isinstance(offsets, Iterable):
offsets = [offsets]
def cavity_probe(offset):
pump_pulse = Utheta(measurement, amp=amplitude, length=length)
x_len = qubit.pulse_params["length"]
if offset < -1*x_len:
return [X(qubit), Id(qubit, length=(-x_len-offset)), pump_pulse, Id(qubit, length=wait)]
elif offset < 0:
total_len = length-offset
pm = Id(measurement, length=offset)+pump_pulse
pq = X(qubit)+Id(qubit, length=(total_len-x_len))
return [pm*pq, Id(qubit, length=wait)]
elif offset < length:
pq = Id(qubit, length=offset)+X(qubit)+Id(qubit, length=(length-offset-x_len))
return [pump_pulse*pq, Id(qubit, length=wait)]
elif offset >= length:
assert offset < (length+wait), f"Wait time {wait} is too short!"
wait_len = wait - (offset-length+x_len)
return [pump_pulse, Id(qubit, length=(offset-length)), X(qubit), Id(qubit, length=wait_len)]
seqs = [[cavity_probe(off), MEAS(qubit)] for off in offsets]
axis_descriptor = [delay_descriptor(offsets)]
metafile = compile_to_hardware(seqs, 'CavityPumpProbe/CavityPumpProbe', axis_descriptor=axis_descriptor)
if showPlot:
plot_pulse_files(metafile)
return metafile
| avg_line_length: 35.354839 | max_line_length: 108 | alphanum_fraction: 0.646376 |
hexsha: 88ab896873af8ebf61da98d50368aa2d9d6cef4d | size: 1,458 | ext: py | lang: Python
path: tweak_param.py
repo (stars/issues/forks): Sim19/SimGEXPwMotifs @ dbf8ac78bf694bc8647b343df44e010b25c96815 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
# coding=utf-8
'''
functions for initializing delta and sigmasquared for datageneration
'''
#### PACKAGES ####
import numpy as np
import numpy.linalg as la
##################
#### FUNCTIONS ####
def eigVal(matrix):
eig = la.eigvals(matrix)
return eig
def tweakEigval(V_eig, Sigma_eig, M_eig, delta, frac, method=None):
# compute eigenvalue of M \otimes V
M_V_eig = np.array([ m*v for m in M_eig for v in V_eig])
# compute constant factor
if frac == 0:
return 0
elif frac == 1:
return np.sum(Sigma_eig) * delta
else:
const = frac/(1-frac) * delta
# run over different methods:
if method is None or method == 'mean_V_mean_M':
eigs = np.mean(Sigma_eig) / (np.mean(V_eig) * np.mean(M_eig))
elif method == 'mean_V_M':
eigs = np.mean(Sigma_eig) / (np.mean(M_V_eig))
elif method == 'median_V_M':
eigs = np.median(Sigma_eig) / (np.median(M_V_eig))
elif method == 'median_V_median_M':
eigs = np.median(Sigma_eig) / (np.median(V_eig) * np.median(M_eig))
elif method == 'trace':
eigs = np.sum(Sigma_eig) / np.sum(V_eig)
        elif not isinstance(method, str):
raise ValueError('method is not a string')
else:
raise ValueError('method %s does not exist' %(method))
# compute sigma_beta2 as product of constant and eigenvalues
sigma_beta2 = const * eigs
return sigma_beta2.real
def checkRange(rsq, frac=0.2):
if (frac-0.01) < rsq < (frac+0.01):
return True
else:
return False
#########################
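# A small worked example (an illustrative addition, not from the original
# module) of the 'trace' method: eigs = sum(Sigma_eig)/sum(V_eig) and
# const = frac/(1-frac) * delta, so diagonal inputs are easy to check by hand.
V_eig = eigVal(np.diag([1.0, 2.0]))      # eigenvalues sum to 3.0
Sigma_eig = eigVal(np.diag([0.5, 0.5]))  # eigenvalues sum to 1.0
M_eig = eigVal(np.diag([1.0, 3.0]))      # unused by the 'trace' method
# const = (0.2/0.8) * 0.1 = 0.025; eigs = 1.0/3.0; result ~= 0.00833
print(tweakEigval(V_eig, Sigma_eig, M_eig, delta=0.1, frac=0.2, method='trace'))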
| avg_line_length: 23.516129 | max_line_length: 69 | alphanum_fraction: 0.661866 |
hexsha: cb213c7464abbd61120152e3fa653524e50b50c9 | size: 4,661 | ext: py | lang: Python
path: warre/tests/unit/api/v1/test_reservation.py
repo (stars/issues/forks): NeCTAR-RC/warre @ 4ca9bfaa6d0568cc8268d570b36c2c0fcb0f9d8e | licenses: ["Apache-2.0"]
max_stars_count: 1 (2021-08-16T14:50:35.000Z – 2021-08-16T14:50:35.000Z) | max_issues_count: null | max_forks_count: null
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from unittest import mock
from warre.tests.unit import base
@mock.patch('warre.quota.get_enforcer', new=mock.Mock())
class TestReservationAPI(base.ApiTestCase):
def setUp(self):
super().setUp()
self.flavor = self.create_flavor()
def test_list_reservations(self):
self.create_reservation(flavor_id=self.flavor.id,
start=datetime.datetime(2021, 1, 1),
end=datetime.datetime(2021, 1, 2))
response = self.client.get('/v1/reservations/')
self.assert200(response)
results = response.get_json().get('results')
self.assertEqual(1, len(results))
def test_list_reservations_non_project(self):
self.create_reservation(flavor_id=self.flavor.id,
start=datetime.datetime(2021, 1, 1),
end=datetime.datetime(2021, 1, 2),
project_id="notmine")
response = self.client.get('/v1/reservations/')
self.assert200(response)
results = response.get_json().get('results')
self.assertEqual(0, len(results))
    def test_create_reservation(self):
data = {'flavor_id': self.flavor.id, 'start': '2020-01-01 00:00',
'end': '2020-01-01 01:00'}
response = self.client.post('/v1/reservations/', json=data)
self.assert200(response)
self.assertEqual(1, response.get_json().get('instance_count'))
    def test_create_reservation_multiple_instances(self):
data = {'flavor_id': self.flavor.id, 'start': '2020-01-01 00:00',
'end': '2020-01-01 01:00', 'instance_count': 2}
response = self.client.post('/v1/reservations/', json=data)
self.assert200(response)
self.assertEqual(2, response.get_json().get('instance_count'))
    def test_create_reservation_noinput(self):
data = {}
response = self.client.post('/v1/reservations/', json=data)
self.assert400(response)
    def test_create_reservation_bad_flavor(self):
data = {'flavor_id': 'bogus', 'start': '2020-01-01 00:00',
'end': '2020-02-02 00:00'}
response = self.client.post('/v1/reservations/', json=data)
self.assert404(response)
    def test_create_reservation_missing_args(self):
data = {'flavor_id': self.flavor.id, 'start': '2020-01-01 00:00'}
response = self.client.post('/v1/reservations/', json=data)
self.assertStatus(response, 422)
class TestAdminReservationAPI(TestReservationAPI):
ROLES = ['admin']
def test_list_reservations_all_projects(self):
self.create_reservation(flavor_id=self.flavor.id,
start=datetime.datetime(2021, 1, 1),
end=datetime.datetime(2021, 1, 2),
project_id='123')
self.create_reservation(flavor_id=self.flavor.id,
start=datetime.datetime(2021, 1, 1),
end=datetime.datetime(2021, 1, 2),
project_id='987')
response = self.client.get('/v1/reservations/?all_projects=1')
self.assert200(response)
results = response.get_json().get('results')
self.assertEqual(2, len(results))
def test_list_reservations_other_project(self):
self.create_reservation(flavor_id=self.flavor.id,
start=datetime.datetime(2021, 1, 1),
end=datetime.datetime(2021, 1, 2),
project_id='123')
self.create_reservation(flavor_id=self.flavor.id,
start=datetime.datetime(2021, 1, 1),
end=datetime.datetime(2021, 1, 2),
project_id='987')
response = self.client.get('/v1/reservations/?all_projects=1')
self.assert200(response)
results = response.get_json().get('results')
self.assertEqual(2, len(results))
| avg_line_length: 41.616071 | max_line_length: 78 | alphanum_fraction: 0.599657 |
hexsha: d281f6a610b7ccb4ccd560f5d1e452c5e526866f | size: 8,112 | ext: py | lang: Python
path: nova/api/openstack/compute/views/servers.py
repo (stars/issues/forks): bopopescu/nova_vmware_compute_driver @ 60d3936b68030647b9f11970c9e0d060fc286dd9 | licenses: ["Apache-2.0"]
max_stars_count: null | max_issues_count: null | max_forks_count: 2 (2019-07-08T22:12:35.000Z – 2020-07-24T08:27:24.000Z)
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010-2011 OpenStack LLC.
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import hashlib
from nova.api.openstack import common
from nova.api.openstack.compute.views import addresses as views_addresses
from nova.api.openstack.compute.views import flavors as views_flavors
from nova.api.openstack.compute.views import images as views_images
from nova.openstack.common import log as logging
from nova.openstack.common import timeutils
LOG = logging.getLogger(__name__)
class ViewBuilder(common.ViewBuilder):
"""Model a server API response as a python dictionary."""
_collection_name = "servers"
_progress_statuses = (
"ACTIVE",
"BUILD",
"REBUILD",
"RESIZE",
"VERIFY_RESIZE",
)
_fault_statuses = (
"ERROR",
)
def __init__(self):
"""Initialize view builder."""
super(ViewBuilder, self).__init__()
self._address_builder = views_addresses.ViewBuilder()
self._flavor_builder = views_flavors.ViewBuilder()
self._image_builder = views_images.ViewBuilder()
def _skip_precooked(func):
def wrapped(self, request, instance):
if instance.get("_is_precooked"):
return dict(server=instance)
else:
return func(self, request, instance)
return wrapped
def create(self, request, instance):
"""View that should be returned when an instance is created."""
return {
"server": {
"id": instance["uuid"],
"links": self._get_links(request,
instance["uuid"],
self._collection_name),
},
}
@_skip_precooked
def basic(self, request, instance):
"""Generic, non-detailed view of an instance."""
return {
"server": {
"id": instance["uuid"],
"name": instance["display_name"],
"links": self._get_links(request,
instance["uuid"],
self._collection_name),
},
}
@_skip_precooked
def show(self, request, instance):
"""Detailed view of a single instance."""
server = {
"server": {
"id": instance["uuid"],
"name": instance["display_name"],
"status": self._get_vm_state(instance),
"tenant_id": instance.get("project_id") or "",
"user_id": instance.get("user_id") or "",
"metadata": self._get_metadata(instance),
"hostId": self._get_host_id(instance) or "",
"image": self._get_image(request, instance),
"flavor": self._get_flavor(request, instance),
"created": timeutils.isotime(instance["created_at"]),
"updated": timeutils.isotime(instance["updated_at"]),
"addresses": self._get_addresses(request, instance),
"accessIPv4": instance.get("access_ip_v4") or "",
"accessIPv6": instance.get("access_ip_v6") or "",
"links": self._get_links(request,
instance["uuid"],
self._collection_name),
},
}
_inst_fault = self._get_fault(request, instance)
if server["server"]["status"] in self._fault_statuses and _inst_fault:
server['server']['fault'] = _inst_fault
if server["server"]["status"] in self._progress_statuses:
server["server"]["progress"] = instance.get("progress", 0)
return server
def index(self, request, instances):
"""Show a list of servers without many details."""
return self._list_view(self.basic, request, instances)
def detail(self, request, instances):
"""Detailed view of a list of instance."""
return self._list_view(self.show, request, instances)
def _list_view(self, func, request, servers):
"""Provide a view for a list of servers."""
server_list = [func(request, server)["server"] for server in servers]
servers_links = self._get_collection_links(request,
servers,
self._collection_name)
servers_dict = dict(servers=server_list)
if servers_links:
servers_dict["servers_links"] = servers_links
return servers_dict
@staticmethod
def _get_metadata(instance):
metadata = instance.get("metadata", [])
return dict((item['key'], item['value']) for item in metadata)
@staticmethod
def _get_vm_state(instance):
return common.status_from_state(instance.get("vm_state"),
instance.get("task_state"))
@staticmethod
def _get_host_id(instance):
host = instance.get("host")
project = str(instance.get("project_id"))
if host:
sha_hash = hashlib.sha224(project + host) # pylint: disable=E1101
return sha_hash.hexdigest()
def _get_addresses(self, request, instance):
context = request.environ["nova.context"]
networks = common.get_networks_for_instance(context, instance)
return self._address_builder.index(networks)["addresses"]
def _get_image(self, request, instance):
image_ref = instance["image_ref"]
if image_ref:
image_id = str(common.get_id_from_href(image_ref))
bookmark = self._image_builder._get_bookmark_link(request,
image_id,
"images")
return {
"id": image_id,
"links": [{
"rel": "bookmark",
"href": bookmark,
}],
}
else:
return ""
def _get_flavor(self, request, instance):
instance_type = instance["instance_type"]
if not instance_type:
LOG.warn(_("Instance has had its instance_type removed "
"from the DB"), instance=instance)
return {}
flavor_id = instance_type["flavorid"]
flavor_bookmark = self._flavor_builder._get_bookmark_link(request,
flavor_id,
"flavors")
return {
"id": str(flavor_id),
"links": [{
"rel": "bookmark",
"href": flavor_bookmark,
}],
}
def _get_fault(self, request, instance):
fault = instance.get("fault", None)
if not fault:
return None
fault_dict = {
"code": fault["code"],
"created": timeutils.isotime(fault["created_at"]),
"message": fault["message"],
}
if fault.get('details', None):
is_admin = False
context = getattr(request, 'context', None)
if context:
is_admin = getattr(request.context, 'is_admin', False)
if is_admin or fault['code'] != 500:
fault_dict['details'] = fault["details"]
return fault_dict
| avg_line_length: 36.540541 | max_line_length: 78 | alphanum_fraction: 0.552392 |
hexsha: e798536b02b48956f417357156ccbf8f6accd1ad | size: 725 | ext: py | lang: Python
path: build_source/lib/util.py
repo (stars/issues/forks): YusunPark/YusunPark.github.io @ f0e6a0195936f4dfcaf71b9a101f1d7efdbceabd | licenses: ["DOC"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
# coding: utf-8
import glob
import os
import collections
def get_doc_list(base_path):
"""
    Collect the list of guide documents.
"""
doc_info = collections.OrderedDict()
doc_path = []
doc_path += glob.glob(os.path.join(base_path, '*', '*'))
# doc_path += glob.glob(os.path.join(base_path, '*', '*_guide'))
# doc_path += glob.glob(os.path.join(base_path, '*', '*_manual'))
for path in sorted(doc_path):
if os.path.isdir(path):
_, guide_type, guide_name = path.rsplit(os.sep, 2)
doc_info['%s.%s'%(guide_type,guide_name)] = path
return doc_info
def _test():
import sys
base_path = sys.argv[1]
print(get_doc_list(base_path))
if __name__ == '__main__':
_test()
| avg_line_length: 21.969697 | max_line_length: 69 | alphanum_fraction: 0.609655 |
hexsha: 86e445a42a38ac308879effde6340970d8dc8920 | size: 2,424 | ext: py | lang: Python
path: Quantum.py
repo (stars/issues/forks): RicardoBBS/ShorAlgo @ 8403e4ace7d09e245f1876fa66ea3b5b347dcda3 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
import sys
import numpy as np
from matplotlib import pyplot as plt
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister, execute, Aer, visualization
from random import randint
def to_binary(N,n_bit):
Nbin = np.zeros(n_bit, dtype=bool)
for i in range(1,n_bit+1):
bit_state = (N % (2**i) != 0)
if bit_state:
N -= 2**(i-1)
Nbin[n_bit-i] = bit_state
return Nbin
def modular_multiplication(qc,a,N):
"""
applies the unitary operator that implements
modular multiplication function x -> a*x(modN)
Only works for the particular case x -> 7*x(mod15)!
"""
for i in range(0,3):
qc.x(i)
qc.cx(2,1)
qc.cx(1,2)
qc.cx(2,1)
qc.cx(1,0)
qc.cx(0,1)
qc.cx(1,0)
qc.cx(3,0)
qc.cx(0,1)
qc.cx(1,0)
def quantum_period(a, N, n_bit):
# Quantum part
print(" Searching the period for N =", N, "and a =", a)
qr = QuantumRegister(n_bit)
cr = ClassicalRegister(n_bit)
qc = QuantumCircuit(qr,cr)
simulator = Aer.get_backend('qasm_simulator')
s0 = randint(1, N-1) # Chooses random int
sbin = to_binary(s0,n_bit) # Turns to binary
print("\n Starting at \n s =", s0, "=", "{0:b}".format(s0), "(bin)")
# Quantum register is initialized with s (in binary)
for i in range(0,n_bit):
if sbin[n_bit-i-1]:
qc.x(i)
s = s0
r=-1 # makes while loop run at least 2 times
# Applies modular multiplication transformation until we come back to initial number s
while s != s0 or r <= 0:
r+=1
# sets up circuit structure
qc.measure(qr, cr)
modular_multiplication(qc,a,N)
qc.draw('mpl')
# runs circuit and processes data
job = execute(qc,simulator, shots=10)
result_counts = job.result().get_counts(qc)
result_histogram_key = list(result_counts)[0] # https://qiskit.org/documentation/stubs/qiskit.result.Result.get_counts.html#qiskit.result.Result.get_counts
s = int(result_histogram_key, 2)
print(" ", result_counts)
plt.show()
print("\n Found period r =", r)
return r
if __name__ == '__main__':
a = 7
N = 15
n_bit=5
r = quantum_period(a, N, n_bit)
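# A classical cross-check (an illustrative addition, not part of the original
# file): the multiplicative order of 7 mod 15 is 4, since the successive
# powers of 7 mod 15 are 7, 4, 13, 1, so quantum_period should report r = 4.
def classical_period(a, N):
    # Brute-force the multiplicative order of a modulo N.
    r, x = 1, a % N
    while x != 1:
        x = (x * a) % N
        r += 1
    return r
assert classical_period(7, 15) == 4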
| avg_line_length: 25.515789 | max_line_length: 163 | alphanum_fraction: 0.565594 |
hexsha: c8d46cf1694a6edef172faf02c8cd23becfa56ba | size: 2,877 | ext: py | lang: Python
path: train.py
repo (stars/issues/forks): zilongzheng/PaddleEBM @ 44356e281a21093b8a4607543a67f7c0601772c1 | licenses: ["Apache-2.0"]
max_stars_count: 7 (2021-02-08T07:34:22.000Z – 2021-02-20T03:52:50.000Z) | max_issues_count: null | max_forks_count: 1 (2021-09-20T21:19:56.000Z – 2021-09-20T21:19:56.000Z)
import os
import sys
import argparse
from utils.config import get_config
from utils.setup import setup
from utils.logger import get_logger
from trainer import Trainer
def parse_args():
parser = argparse.ArgumentParser(description='PaddleEBM')
parser.add_argument('-c',
'--config-file',
metavar="FILE",
help='config file path')
# cuda setting
parser.add_argument('--no-cuda',
action='store_true',
default=False,
help='disables CUDA training')
# checkpoint and log
parser.add_argument('--resume',
type=str,
default=None,
help='put the path to resuming file if needed')
parser.add_argument('--load',
type=str,
default=None,
                        help='put the path to the weights file to load')
# for evaluation
parser.add_argument('--val-interval',
type=int,
default=1,
help='run validation every interval')
parser.add_argument('--evaluate-only',
action='store_true',
default=False,
help='skip validation during training')
# config options
parser.add_argument('opts',
help='See config for all options',
default=None,
nargs=argparse.REMAINDER)
#for inference
parser.add_argument("--source_path",
default="",
metavar="FILE",
help="path to source image")
parser.add_argument("--reference_dir",
default="",
help="path to reference images")
parser.add_argument("--model_path", default=None, help="model for loading")
args = parser.parse_args()
return args
def main(args, cfg):
# init environment, include logger, dynamic graph, seed, device, train or test mode...
setup(args, cfg)
logger = get_logger()
logger.info(cfg)
# build trainer
trainer = Trainer(cfg)
# continue train or evaluate, checkpoint need contain epoch and optimizer info
if args.resume:
trainer.resume(args.resume)
    # evaluate or finetune, only load generator weights
elif args.load:
trainer.load(args.load)
if args.evaluate_only:
trainer.test()
return
# training, when keyboard interrupt save weights
try:
trainer.train()
except KeyboardInterrupt as e:
trainer.save(trainer.current_epoch)
trainer.close()
if __name__ == '__main__':
args = parse_args()
cfg = get_config(args.config_file)
main(args, cfg)
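# Hypothetical invocations (an illustrative addition; the config and weight
# paths are assumptions, not taken from the repository):
#   python train.py --config-file configs/my_ebm.yaml
#   python train.py --config-file configs/my_ebm.yaml --evaluate-only \
#       --load output/weights.pdparams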
| avg_line_length: 30.606383 | max_line_length: 90 | alphanum_fraction: 0.546055 |
hexsha: c209f5f2ebfc423ee8740a5fa7b9b5607e4743a7 | size: 4,521 | ext: py | lang: Python
path: tests/test_completer.py
repo (stars/issues/forks): abhishek-kumar-code/redfishtool_nsfcac @ 928edbf2c9c3ebfd6cb4722a2a77b1e63372211c | licenses: ["MIT"]
max_stars_count: 6 (2019-03-08T00:00:41.000Z – 2021-03-12T16:42:53.000Z) | max_issues_count: null | max_forks_count: 2 (2019-05-08T02:43:06.000Z – 2021-01-06T16:12:33.000Z)
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import six
import unittest
from prompt_toolkit.document import Document
from http_prompt.completer import HttpPromptCompleter
from http_prompt.context import Context
class TestCompleter(unittest.TestCase):
def setUp(self):
self.context = Context('http://localhost', spec={
'paths': {
'/users': {},
'/users/{username}': {},
'/users/{username}/events': {},
'/users/{username}/orgs': {},
'/orgs': {},
'/orgs/{org}': {},
'/orgs/{org}/events': {},
'/orgs/{org}/members': {}
}
})
self.completer = HttpPromptCompleter(self.context)
self.completer_event = None
def get_completions(self, command):
if not isinstance(command, six.text_type):
command = six.u(command)
position = len(command)
completions = self.completer.get_completions(
Document(text=command, cursor_position=position),
self.completer_event)
return [c.text for c in completions]
def test_header_name(self):
result = self.get_completions('ctype')
self.assertEqual(result[0], 'Content-Type')
def test_header_value(self):
result = self.get_completions('Content-Type:json')
self.assertEqual(result[0], 'application/json')
def test_verify_option(self):
result = self.get_completions('--vfy')
self.assertEqual(result[0], '--verify')
def test_preview_then_action(self):
result = self.get_completions('httpie po')
self.assertEqual(result[0], 'post')
def test_rm_body_param(self):
self.context.body_params['my_name'] = 'dont_care'
result = self.get_completions('rm -b ')
self.assertEqual(result[0], 'my_name')
def test_rm_body_json_param(self):
self.context.body_json_params['number'] = 2
result = self.get_completions('rm -b ')
self.assertEqual(result[0], 'number')
def test_rm_querystring_param(self):
self.context.querystring_params['my_name'] = 'dont_care'
result = self.get_completions('rm -q ')
self.assertEqual(result[0], 'my_name')
def test_rm_header(self):
self.context.headers['Accept'] = 'dont_care'
result = self.get_completions('rm -h ')
self.assertEqual(result[0], 'Accept')
def test_rm_option(self):
self.context.options['--form'] = None
result = self.get_completions('rm -o ')
self.assertEqual(result[0], '--form')
def test_querystring_with_chinese(self):
result = self.get_completions('name==王')
self.assertFalse(result)
def test_header_with_spanish(self):
result = self.get_completions('X-Custom-Header:Jesú')
self.assertFalse(result)
def test_options_method(self):
result = self.get_completions('opt')
self.assertEqual(result[0], 'options')
def test_ls_no_path(self):
result = self.get_completions('ls ')
self.assertEqual(result, ['orgs', 'users'])
def test_ls_no_path_substring(self):
result = self.get_completions('ls o')
self.assertEqual(result, ['orgs'])
def test_ls_absolute_path(self):
result = self.get_completions('ls /users/1/')
self.assertEqual(result, ['events', 'orgs'])
def test_ls_absolute_path_substring(self):
result = self.get_completions('ls /users/1/e')
self.assertEqual(result, ['events'])
def test_ls_relative_path(self):
self.context.url = 'http://localhost/orgs'
result = self.get_completions('ls 1/')
self.assertEqual(result, ['events', 'members'])
def test_cd_no_path(self):
result = self.get_completions('cd ')
self.assertEqual(result, ['orgs', 'users'])
def test_cd_no_path_substring(self):
result = self.get_completions('cd o')
self.assertEqual(result, ['orgs'])
def test_cd_absolute_path(self):
result = self.get_completions('cd /users/1/')
self.assertEqual(result, ['events', 'orgs'])
def test_cd_absolute_path_substring(self):
result = self.get_completions('cd /users/1/e')
self.assertEqual(result, ['events'])
def test_cd_relative_path(self):
self.context.url = 'http://localhost/orgs'
result = self.get_completions('cd 1/')
self.assertEqual(result, ['events', 'members'])
| avg_line_length: 33.738806 | max_line_length: 64 | alphanum_fraction: 0.624419 |
hexsha: 41e3a6e838e79c0d5746ac8b6cb81bd8b924f04b | size: 9,171 | ext: py | lang: Python
path: src/sagemaker/chainer/model.py
repo (stars/issues/forks): satishpasumarthi/sagemaker-python-sdk @ 255a339ae985041ef47e3a80da91b9f54bca17b9 | licenses: ["Apache-2.0"]
max_stars_count: 1 (2021-12-10T16:18:29.000Z – 2021-12-10T16:18:29.000Z) | max_issues_count: 20 (2021-09-17T20:50:11.000Z – 2021-12-09T00:29:02.000Z) | max_forks_count: null
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Placeholder docstring"""
from __future__ import absolute_import
import logging
import sagemaker
from sagemaker import image_uris
from sagemaker.fw_utils import (
model_code_key_prefix,
python_deprecation_warning,
validate_version_or_image_args,
)
from sagemaker.model import FrameworkModel, MODEL_SERVER_WORKERS_PARAM_NAME
from sagemaker.chainer import defaults
from sagemaker.deserializers import NumpyDeserializer
from sagemaker.predictor import Predictor
from sagemaker.serializers import NumpySerializer
logger = logging.getLogger("sagemaker")
class ChainerPredictor(Predictor):
"""A Predictor for inference against Chainer Endpoints.
This is able to serialize Python lists, dictionaries, and numpy arrays to
multidimensional tensors for Chainer inference.
"""
def __init__(
self,
endpoint_name,
sagemaker_session=None,
serializer=NumpySerializer(),
deserializer=NumpyDeserializer(),
):
"""Initialize an ``ChainerPredictor``.
Args:
endpoint_name (str): The name of the endpoint to perform inference
on.
sagemaker_session (sagemaker.session.Session): Session object which
manages interactions with Amazon SageMaker APIs and any other
AWS services needed. If not specified, the estimator creates one
using the default AWS configuration chain.
serializer (sagemaker.serializers.BaseSerializer): Optional. Default
serializes input data to .npy format. Handles lists and numpy
arrays.
deserializer (sagemaker.deserializers.BaseDeserializer): Optional.
Default parses the response from .npy format to numpy array.
"""
super(ChainerPredictor, self).__init__(
endpoint_name,
sagemaker_session,
serializer=serializer,
deserializer=deserializer,
)
class ChainerModel(FrameworkModel):
"""An Chainer SageMaker ``Model`` that can be deployed to a SageMaker ``Endpoint``."""
_framework_name = "chainer"
def __init__(
self,
model_data,
role,
entry_point,
image_uri=None,
framework_version=None,
py_version=None,
predictor_cls=ChainerPredictor,
model_server_workers=None,
**kwargs
):
"""Initialize an ChainerModel.
Args:
model_data (str): The S3 location of a SageMaker model data
``.tar.gz`` file.
role (str): An AWS IAM role (either name or full ARN). The Amazon
SageMaker training jobs and APIs that create Amazon SageMaker
endpoints use this role to access training data and model
artifacts. After the endpoint is created, the inference code
might use the IAM role, if it needs to access an AWS resource.
entry_point (str): Path (absolute or relative) to the Python source
file which should be executed as the entry point to model
hosting. If ``source_dir`` is specified, then ``entry_point``
must point to a file located at the root of ``source_dir``.
image_uri (str): A Docker image URI (default: None). If not specified,
a default image for Chainer will be used.
If ``framework_version`` or ``py_version``
are ``None``, then ``image_uri`` is required. If ``image_uri`` is also ``None``,
then a ``ValueError`` will be raised.
framework_version (str): Chainer version you want to use for
executing your model training code. Defaults to ``None``. Required
unless ``image_uri`` is provided.
py_version (str): Python version you want to use for executing your
model training code. Defaults to ``None``. Required unless
``image_uri`` is provided.
predictor_cls (callable[str, sagemaker.session.Session]): A function
to call to create a predictor with an endpoint name and
SageMaker ``Session``. If specified, ``deploy()`` returns the
result of invoking this function on the created endpoint name.
model_server_workers (int): Optional. The number of worker processes
used by the inference server. If None, server will use one
worker per vCPU.
**kwargs: Keyword arguments passed to the
:class:`~sagemaker.model.FrameworkModel` initializer.
.. tip::
You can find additional parameters for initializing this class at
:class:`~sagemaker.model.FrameworkModel` and
:class:`~sagemaker.model.Model`.
"""
validate_version_or_image_args(framework_version, py_version, image_uri)
if py_version == "py2":
logger.warning(
python_deprecation_warning(self._framework_name, defaults.LATEST_PY2_VERSION)
)
self.framework_version = framework_version
self.py_version = py_version
super(ChainerModel, self).__init__(
model_data, image_uri, role, entry_point, predictor_cls=predictor_cls, **kwargs
)
self.model_server_workers = model_server_workers
def prepare_container_def(
self, instance_type=None, accelerator_type=None, serverless_inference_config=None
):
"""Return a container definition with framework configuration set in model environment.
Args:
instance_type (str): The EC2 instance type to deploy this Model to.
For example, 'ml.p2.xlarge'.
accelerator_type (str): The Elastic Inference accelerator type to
deploy to the instance for loading and making inferences to the
model. For example, 'ml.eia1.medium'.
serverless_inference_config (sagemaker.serverless.ServerlessInferenceConfig):
Specifies configuration related to serverless endpoint. Instance type is
not provided in serverless inference. So this is used to find image URIs.
Returns:
dict[str, str]: A container definition object usable with the
CreateModel API.
"""
deploy_image = self.image_uri
if not deploy_image:
if instance_type is None and serverless_inference_config is None:
raise ValueError(
"Must supply either an instance type (for choosing CPU vs GPU) or an image URI."
)
region_name = self.sagemaker_session.boto_session.region_name
deploy_image = self.serving_image_uri(
region_name,
instance_type,
accelerator_type=accelerator_type,
serverless_inference_config=serverless_inference_config,
)
deploy_key_prefix = model_code_key_prefix(self.key_prefix, self.name, deploy_image)
self._upload_code(deploy_key_prefix)
deploy_env = dict(self.env)
deploy_env.update(self._script_mode_env_vars())
if self.model_server_workers:
deploy_env[MODEL_SERVER_WORKERS_PARAM_NAME.upper()] = str(self.model_server_workers)
return sagemaker.container_def(deploy_image, self.model_data, deploy_env)
def serving_image_uri(
self, region_name, instance_type, accelerator_type=None, serverless_inference_config=None
):
"""Create a URI for the serving image.
Args:
region_name (str): AWS region where the image is uploaded.
instance_type (str): SageMaker instance type. Used to determine device type
(cpu/gpu/family-specific optimized).
serverless_inference_config (sagemaker.serverless.ServerlessInferenceConfig):
Specifies configuration related to serverless endpoint. Instance type is
not provided in serverless inference. So this is used to determine device type.
Returns:
str: The appropriate image URI based on the given parameters.
"""
return image_uris.retrieve(
self._framework_name,
region_name,
version=self.framework_version,
py_version=self.py_version,
instance_type=instance_type,
accelerator_type=accelerator_type,
image_scope="inference",
serverless_inference_config=serverless_inference_config,
)
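# A minimal deployment sketch (an illustrative addition, not part of the
# original file). The S3 path, IAM role, entry point, and instance type are
# hypothetical; deploy() is inherited from sagemaker.model.Model.
def _example_deploy():
    model = ChainerModel(
        model_data="s3://my-bucket/chainer/model.tar.gz",
        role="arn:aws:iam::123456789012:role/SageMakerRole",
        entry_point="inference.py",
        framework_version="5.0.0",
        py_version="py3",
    )
    # deploy() calls prepare_container_def() above to resolve the image URI
    # and environment, then returns a ChainerPredictor for the new endpoint.
    return model.deploy(initial_instance_count=1, instance_type="ml.m5.xlarge")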
| avg_line_length: 43.056338 | max_line_length: 100 | alphanum_fraction: 0.654127 |
hexsha: edb3ed10d965bba40b906d4e4c53ff287885abde | size: 73,615 | ext: py | lang: Python
path: spyder/plugins/explorer/widgets/explorer.py
repo (stars/issues/forks): feiser2016/spyder @ b6e7a45f8bb12b9be6b279218c44e19f603685e8 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
# -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
"""Files and Directories Explorer"""
# pylint: disable=C0103
# pylint: disable=R0903
# pylint: disable=R0911
# pylint: disable=R0201
from __future__ import with_statement
# Standard library imports
import os
import os.path as osp
import re
import shutil
import subprocess
import sys
# Third party imports
from qtpy.compat import getexistingdirectory, getsavefilename
from qtpy.QtCore import (QDir, QFileInfo, QMimeData, QSize,
QSortFilterProxyModel, Qt, QTimer, QUrl, Signal, Slot)
from qtpy.QtGui import QDrag, QKeySequence
from qtpy.QtWidgets import (QApplication, QFileIconProvider, QFileSystemModel,
QHBoxLayout, QInputDialog, QLabel, QLineEdit,
QMenu, QMessageBox, QToolButton, QTreeView,
QVBoxLayout, QWidget)
# Local imports
from spyder.config.base import _, get_home_dir
from spyder.config.manager import CONF
from spyder.py3compat import str_lower, to_binary_string, to_text_string
from spyder.utils import encoding
from spyder.utils import icon_manager as ima
from spyder.utils import misc, programs, vcs
from spyder.utils.misc import getcwd_or_home
from spyder.utils.qthelpers import (add_actions, create_action,
create_plugin_layout, file_uri)
try:
from nbconvert import PythonExporter as nbexporter
except Exception:  # nbconvert may fail to import for reasons beyond ImportError
    nbexporter = None    # analysis:ignore
def open_file_in_external_explorer(filename):
if sys.platform == "darwin":
subprocess.call(["open", "-R", filename])
elif os.name == 'nt':
subprocess.call(["explorer", "/select,", filename])
else:
        filename = os.path.dirname(filename)
subprocess.call(["xdg-open", filename])
def show_in_external_file_explorer(fnames=None):
"""Show files in external file explorer
Args:
fnames (list): Names of files to show.
"""
if not isinstance(fnames, (tuple, list)):
fnames = [fnames]
for fname in fnames:
open_file_in_external_explorer(fname)
def fixpath(path):
"""Normalize path fixing case, making absolute and removing symlinks"""
norm = osp.normcase if os.name == 'nt' else osp.normpath
return norm(osp.abspath(osp.realpath(path)))
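# Illustrative behavior (hypothetical input): on Windows,
# fixpath('C:\\Projects\\..\\Src') resolves symlinks, makes the path absolute
# and case-folds it via normcase (e.g. 'c:\\src'); on other platforms
# normpath is applied instead, so case is preserved.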
def create_script(fname):
"""Create a new Python script"""
text = os.linesep.join(["# -*- coding: utf-8 -*-", "", ""])
try:
encoding.write(to_text_string(text), fname, 'utf-8')
except EnvironmentError as error:
        QMessageBox.critical(None, _("Save Error"),
                             _("<b>Unable to save file '%s'</b>"
                               "<br><br>Error message:<br>%s"
                               ) % (osp.basename(fname), str(error)))
def listdir(path, include=r'.', exclude=r'\.pyc$|^\.', show_all=False,
folders_only=False):
"""List files and directories"""
namelist = []
dirlist = [to_text_string(osp.pardir)]
for item in os.listdir(to_text_string(path)):
if re.search(exclude, item) and not show_all:
continue
if osp.isdir(osp.join(path, item)):
dirlist.append(item)
elif folders_only:
continue
elif re.search(include, item) or show_all:
namelist.append(item)
return sorted(dirlist, key=str_lower) + \
sorted(namelist, key=str_lower)
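# Illustrative call (assumed directory layout, not from the source): for a
# folder containing pkg/, a.py, b.pyc and .hidden, listdir('/tmp/demo')
# returns ['..', 'pkg', 'a.py'] -- directories first, then files, each sorted
# case-insensitively, with the default exclude pattern dropping b.pyc and
# .hidden unless show_all is True.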
def has_subdirectories(path, include, exclude, show_all):
"""Return True if path has subdirectories"""
try:
# > 1 because of '..'
        return len(listdir(path, include, exclude,
                           show_all, folders_only=True)) > 1
except (IOError, OSError):
return False
class IconProvider(QFileIconProvider):
"""Project tree widget icon provider"""
def __init__(self, treeview):
super(IconProvider, self).__init__()
self.treeview = treeview
@Slot(int)
@Slot(QFileInfo)
def icon(self, icontype_or_qfileinfo):
"""Reimplement Qt method"""
if isinstance(icontype_or_qfileinfo, QFileIconProvider.IconType):
return super(IconProvider, self).icon(icontype_or_qfileinfo)
else:
qfileinfo = icontype_or_qfileinfo
fname = osp.normpath(to_text_string(qfileinfo.absoluteFilePath()))
if osp.isfile(fname) or osp.isdir(fname):
icon = ima.get_icon_by_extension_or_type(fname,
scale_factor=1.0)
else:
icon = ima.get_icon('binary', adjust_for_interface=True)
return icon
class DirView(QTreeView):
"""Base file/directory tree view"""
sig_edit = Signal(str)
sig_removed = Signal(str)
sig_removed_tree = Signal(str)
sig_renamed = Signal(str, str)
sig_renamed_tree = Signal(str, str)
sig_create_module = Signal(str)
sig_run = Signal(str)
sig_new_file = Signal(str)
sig_open_interpreter = Signal(str)
redirect_stdio = Signal(bool)
def __init__(self, parent=None):
super(DirView, self).__init__(parent)
self.parent_widget = parent
# Options
self.name_filters = ['*.py']
self.show_all = None
self.single_click_to_open = False
self.file_associations = {}
self._last_column = 0
self._last_order = True
header = self.header()
header.setContextMenuPolicy(Qt.CustomContextMenu)
self.menu = None
self.menu_header = QMenu(self)
self.common_actions = None
self.__expanded_state = None
self._to_be_loaded = None
self.fsmodel = None
self.setup_fs_model()
self._scrollbar_positions = None
self.setSelectionMode(self.ExtendedSelection)
self.shortcuts = self.create_shortcuts()
# Signals
header.customContextMenuRequested.connect(self.show_header_menu)
#---- Model
def setup_fs_model(self):
"""Setup filesystem model"""
filters = (QDir.AllDirs | QDir.Files | QDir.Drives
| QDir.NoDotAndDotDot | QDir.Hidden)
self.fsmodel = QFileSystemModel(self)
self.fsmodel.setFilter(filters)
self.fsmodel.setNameFilterDisables(False)
def install_model(self):
"""Install filesystem model"""
self.setModel(self.fsmodel)
def setup_view(self):
"""Setup view"""
self.install_model()
self.fsmodel.directoryLoaded.connect(
lambda: self.resizeColumnToContents(0))
self.setAnimated(False)
self.setSortingEnabled(True)
self.sortByColumn(0, Qt.AscendingOrder)
self.fsmodel.modelReset.connect(self.reset_icon_provider)
self.reset_icon_provider()
# Disable the view of .spyproject.
self.filter_directories()
def sortByColumn(self, column, order=Qt.AscendingOrder):
"""Override Qt method."""
header = self.header()
header.setSortIndicatorShown(True)
QTreeView.sortByColumn(self, column, order)
header.setSortIndicator(0, order)
self._last_column = column
self._last_order = not self._last_order
def show_header_menu(self, pos):
"""Display header menu."""
self.menu_header.clear()
kind = _('Kind') if sys.platform == 'darwin' else _('Type')
items = [_('Size'), kind, _("Date modified")]
header_actions = []
for idx, item in enumerate(items):
column = idx + 1
action = create_action(
self,
item,
None,
None,
toggled=lambda x, c=column: self.toggle_column(c),
)
action.blockSignals(True)
action.setChecked(not self.isColumnHidden(column))
action.blockSignals(False)
header_actions.append(action)
add_actions(self.menu_header, header_actions)
self.menu_header.popup(self.mapToGlobal(pos))
def toggle_column(self, column):
"""Toggle visibility of column."""
is_hidden = self.isColumnHidden(column)
self.setColumnHidden(column, not is_hidden)
visible_columns = [0]
for col in range(1, 4):
if not self.isColumnHidden(col):
visible_columns.append(col)
self.parent_widget.sig_option_changed.emit('visible_columns',
visible_columns)
def set_single_click_to_open(self, value):
"""Set single click to open items."""
self.single_click_to_open = value
self.parent_widget.sig_option_changed.emit('single_click_to_open',
value)
    def set_file_associations(self, value):
        """Set the file associations used to open items."""
        self.file_associations = value
def set_name_filters(self, name_filters):
"""Set name filters"""
self.name_filters = name_filters
self.fsmodel.setNameFilters(name_filters)
def set_show_all(self, state):
"""Toggle 'show all files' state"""
if state:
self.fsmodel.setNameFilters([])
else:
self.fsmodel.setNameFilters(self.name_filters)
def get_filename(self, index):
"""Return filename associated with *index*"""
if index:
return osp.normpath(to_text_string(self.fsmodel.filePath(index)))
def get_index(self, filename):
"""Return index associated with filename"""
return self.fsmodel.index(filename)
def get_selected_filenames(self):
"""Return selected filenames"""
if self.selectionMode() == self.ExtendedSelection:
if self.selectionModel() is None:
return []
return [self.get_filename(idx) for idx in
self.selectionModel().selectedRows()]
else:
return [self.get_filename(self.currentIndex())]
def get_dirname(self, index):
"""Return dirname associated with *index*"""
fname = self.get_filename(index)
if fname:
if osp.isdir(fname):
return fname
else:
return osp.dirname(fname)
#---- Tree view widget
    def setup(self, name_filters=None, show_all=False,
              single_click_to_open=False, file_associations=None):
        """Setup tree widget"""
        # None defaults avoid sharing mutable default objects between instances.
        name_filters = name_filters if name_filters is not None else ['*.py', '*.pyw']
        file_associations = file_associations if file_associations is not None else {}
        self.setup_view()
self.set_name_filters(name_filters)
self.show_all = show_all
self.single_click_to_open = single_click_to_open
self.set_file_associations(file_associations)
# Setup context menu
self.menu = QMenu(self)
self.common_actions = self.setup_common_actions()
def reset_icon_provider(self):
"""Reset file system model icon provider
The purpose of this is to refresh files/directories icons"""
self.fsmodel.setIconProvider(IconProvider(self))
#---- Context menu
def setup_common_actions(self):
"""Setup context menu common actions"""
# Filters
self.filters_action = create_action(
self, _("Edit filename filters..."), None, ima.icon('filter'),
triggered=self.edit_filter,
)
# Show all files
self.all_action = create_action(self, _("Show all files"),
toggled=self.toggle_all)
        # Single click to open
self.single_click_to_open_action = create_action(
self,
_("Single click to open"),
toggled=self.set_single_click_to_open,
)
actions = [self.filters_action, self.all_action,
self.single_click_to_open_action]
self.update_common_actions()
return actions
def update_common_actions(self):
"""Update the status of widget actions based on stored state."""
self.set_show_all(self.show_all)
self.all_action.setChecked(self.show_all)
self.single_click_to_open_action.setChecked(self.single_click_to_open)
def get_common_file_associations(self, fnames):
"""
Return the list of common matching file associations for all fnames.
"""
all_values = []
for fname in fnames:
values = self.get_file_associations(fname)
all_values.append(values)
common = set(all_values[0])
for index in range(1, len(all_values)):
common = common.intersection(all_values[index])
return list(sorted(common))
def get_file_associations(self, fname):
"""Return the list of matching file associations for `fname`."""
for exts, values in self.file_associations.items():
clean_exts = [ext.strip() for ext in exts.split(',')]
for ext in clean_exts:
if fname.endswith((ext, ext[1:])):
                    break
            else:
                continue  # Only executed if the inner loop did not break
            break  # Only executed if the inner loop did break
else:
values = []
return values
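    # Matching illustrated with hypothetical data: if self.file_associations
    # were {'.txt,.log': [('Editor', '/usr/bin/editor')]}, then
    # get_file_associations('notes.log') splits the key on commas, finds the
    # '.log' suffix match and returns [('Editor', '/usr/bin/editor')]; a name
    # matching no entry falls through the for-else and returns [].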
@Slot()
def edit_filter(self):
"""Edit name filters"""
filters, valid = QInputDialog.getText(self, _('Edit filename filters'),
_('Name filters:'),
QLineEdit.Normal,
", ".join(self.name_filters))
if valid:
filters = [f.strip() for f in to_text_string(filters).split(',')]
self.parent_widget.sig_option_changed.emit('name_filters', filters)
self.set_name_filters(filters)
@Slot(bool)
def toggle_all(self, checked):
"""Toggle all files mode"""
self.parent_widget.sig_option_changed.emit('show_all', checked)
self.show_all = checked
self.set_show_all(checked)
def create_file_new_actions(self, fnames):
"""Return actions for submenu 'New...'"""
if not fnames:
return []
new_file_act = create_action(self, _("File..."),
icon=ima.icon('filenew'),
triggered=lambda:
self.new_file(fnames[-1]))
new_module_act = create_action(self, _("Module..."),
icon=ima.icon('spyder'),
triggered=lambda:
self.new_module(fnames[-1]))
new_folder_act = create_action(self, _("Folder..."),
icon=ima.icon('folder_new'),
triggered=lambda:
self.new_folder(fnames[-1]))
new_package_act = create_action(self, _("Package..."),
icon=ima.icon('package_new'),
triggered=lambda:
self.new_package(fnames[-1]))
return [new_file_act, new_folder_act, None,
new_module_act, new_package_act]
def create_file_import_actions(self, fnames):
"""Return actions for submenu 'Import...'"""
return []
def create_file_manage_actions(self, fnames):
"""Return file management actions"""
only_files = all([osp.isfile(_fn) for _fn in fnames])
only_modules = all([osp.splitext(_fn)[1] in ('.py', '.pyw', '.ipy')
for _fn in fnames])
only_notebooks = all([osp.splitext(_fn)[1] == '.ipynb'
for _fn in fnames])
only_valid = all([encoding.is_text_file(_fn) for _fn in fnames])
run_action = create_action(self, _("Run"), icon=ima.icon('run'),
triggered=self.run)
open_with_spyder_action = create_action(
self, _("Open in Spyder"), icon=ima.icon('edit'),
triggered=self.open)
open_with_menu = QMenu(_('Open with'), self)
open_external_action = create_action(
self,
_("Open externally"),
triggered=self.open_external)
move_action = create_action(
self,
_("Move..."),
icon="move.png",
triggered=self.move)
delete_action = create_action(
self,
_("Delete..."),
icon=ima.icon('editdelete'),
triggered=self.delete)
rename_action = create_action(
self,
_("Rename..."),
icon=ima.icon('rename'),
triggered=self.rename)
ipynb_convert_action = create_action(
self,
_("Convert to Python script"),
icon=ima.icon('python'),
triggered=self.convert_notebooks)
copy_file_clipboard_action = create_action(
self,
_("Copy"),
QKeySequence(CONF.get_shortcut('explorer', 'copy file')),
icon=ima.icon('editcopy'),
triggered=self.copy_file_clipboard)
save_file_clipboard_action = create_action(
self,
_("Paste"),
QKeySequence(CONF.get_shortcut('explorer', 'paste file')),
icon=ima.icon('editpaste'),
triggered=self.save_file_clipboard)
copy_absolute_path_action = create_action(
self,
_("Copy Absolute Path"),
QKeySequence(CONF.get_shortcut('explorer', 'copy absolute path')),
triggered=self.copy_absolute_path)
copy_relative_path_action = create_action(
self,
_("Copy Relative Path"),
QKeySequence(CONF.get_shortcut('explorer', 'copy relative path')),
triggered=self.copy_relative_path)
actions = []
if only_modules:
actions.append(run_action)
if only_files:
if only_valid:
actions.append(open_with_spyder_action)
if len(fnames) == 1:
assoc = self.get_file_associations(fnames[0])
elif len(fnames) > 1:
assoc = self.get_common_file_associations(fnames)
if len(assoc) >= 1:
actions.append(open_with_menu)
open_with_actions = []
for app_name, fpath in assoc:
if not (os.path.isfile(fpath) or os.path.isdir(fpath)):
app_name += _(' (Application not found!)')
open_with_action = create_action(
self, app_name,
triggered=lambda x, y=fpath: self.open_association(y))
if not (os.path.isfile(fpath) or os.path.isdir(fpath)):
open_with_action.setDisabled(True)
open_with_actions.append(open_with_action)
open_external_action_2 = create_action(
self, _("Default external application"),
triggered=self.open_external)
open_with_actions.append(open_external_action_2)
add_actions(open_with_menu, open_with_actions)
else:
actions.append(open_external_action)
if sys.platform == 'darwin':
text = _("Show in Finder")
else:
text = _("Show in Folder")
external_fileexp_action = create_action(
self, text, triggered=self.show_in_external_file_explorer)
actions += [delete_action, rename_action]
basedir = fixpath(osp.dirname(fnames[0]))
if all([fixpath(osp.dirname(_fn)) == basedir for _fn in fnames]):
actions.append(move_action)
actions += [None]
actions += [copy_file_clipboard_action, save_file_clipboard_action,
copy_absolute_path_action, copy_relative_path_action]
if not QApplication.clipboard().mimeData().hasUrls():
save_file_clipboard_action.setDisabled(True)
actions += [None]
actions.append(external_fileexp_action)
actions.append(None)
if only_notebooks and nbexporter is not None:
actions.append(ipynb_convert_action)
dirname = fnames[0] if osp.isdir(fnames[0]) else osp.dirname(fnames[0])
if len(fnames) == 1:
# VCS support is quite limited for now, so we are enabling the VCS
# related actions only when a single file/folder is selected:
if vcs.is_vcs_repository(dirname):
commit_slot = lambda: self.vcs_command([dirname], 'commit')
browse_slot = lambda: self.vcs_command([dirname], 'browse')
vcs_ci = create_action(self, _("Commit"),
icon=ima.icon('vcs_commit'),
triggered=commit_slot)
vcs_log = create_action(self, _("Browse repository"),
icon=ima.icon('vcs_browse'),
triggered=browse_slot)
actions += [None, vcs_ci, vcs_log]
return actions
def create_folder_manage_actions(self, fnames):
"""Return folder management actions"""
actions = []
        _title = _("Open IPython console here")
action = create_action(self, _title,
triggered=lambda:
self.open_interpreter(fnames))
actions.append(action)
return actions
def create_context_menu_actions(self):
"""Create context menu actions"""
actions = []
fnames = self.get_selected_filenames()
new_actions = self.create_file_new_actions(fnames)
if len(new_actions) > 1:
# Creating a submenu only if there is more than one entry
new_act_menu = QMenu(_('New'), self)
add_actions(new_act_menu, new_actions)
actions.append(new_act_menu)
else:
actions += new_actions
import_actions = self.create_file_import_actions(fnames)
if len(import_actions) > 1:
# Creating a submenu only if there is more than one entry
import_act_menu = QMenu(_('Import'), self)
add_actions(import_act_menu, import_actions)
actions.append(import_act_menu)
else:
actions += import_actions
if actions:
actions.append(None)
if fnames:
actions += self.create_file_manage_actions(fnames)
if actions:
actions.append(None)
if fnames and all([osp.isdir(_fn) for _fn in fnames]):
actions += self.create_folder_manage_actions(fnames)
return actions
def update_menu(self):
"""Update context menu"""
self.menu.clear()
add_actions(self.menu, self.create_context_menu_actions())
#---- Events
def viewportEvent(self, event):
"""Reimplement Qt method"""
# Prevent Qt from crashing or showing warnings like:
# "QSortFilterProxyModel: index from wrong model passed to
# mapFromSource", probably due to the fact that the file system model
# is being built. See spyder-ide/spyder#1250.
#
# This workaround was inspired by the following KDE bug:
# https://bugs.kde.org/show_bug.cgi?id=172198
#
# Apparently, this is a bug from Qt itself.
self.executeDelayedItemsLayout()
return QTreeView.viewportEvent(self, event)
def contextMenuEvent(self, event):
"""Override Qt method"""
# Needed to handle not initialized menu.
# See spyder-ide/spyder#6975
try:
self.update_menu()
self.menu.popup(event.globalPos())
except AttributeError:
pass
def keyPressEvent(self, event):
"""Reimplement Qt method"""
if event.key() in (Qt.Key_Enter, Qt.Key_Return):
self.clicked()
elif event.key() == Qt.Key_F2:
self.rename()
elif event.key() == Qt.Key_Delete:
self.delete()
elif event.key() == Qt.Key_Backspace:
self.go_to_parent_directory()
else:
QTreeView.keyPressEvent(self, event)
def mouseDoubleClickEvent(self, event):
"""Reimplement Qt method"""
QTreeView.mouseDoubleClickEvent(self, event)
self.clicked()
def mouseReleaseEvent(self, event):
"""Reimplement Qt method."""
QTreeView.mouseReleaseEvent(self, event)
if self.single_click_to_open:
self.clicked()
@Slot()
def clicked(self):
"""
Selected item was single/double-clicked or enter/return was pressed.
"""
fnames = self.get_selected_filenames()
for fname in fnames:
if osp.isdir(fname):
self.directory_clicked(fname)
else:
if len(fnames) == 1:
assoc = self.get_file_associations(fnames[0])
elif len(fnames) > 1:
assoc = self.get_common_file_associations(fnames)
if assoc:
self.open_association(assoc[0][-1])
else:
self.open([fname])
def directory_clicked(self, dirname):
"""Directory was just clicked"""
pass
#---- Drag
def dragEnterEvent(self, event):
"""Drag and Drop - Enter event"""
event.setAccepted(event.mimeData().hasFormat("text/plain"))
def dragMoveEvent(self, event):
"""Drag and Drop - Move event"""
if (event.mimeData().hasFormat("text/plain")):
event.setDropAction(Qt.MoveAction)
event.accept()
else:
event.ignore()
def startDrag(self, dropActions):
"""Reimplement Qt Method - handle drag event"""
data = QMimeData()
data.setUrls([QUrl(fname) for fname in self.get_selected_filenames()])
drag = QDrag(self)
drag.setMimeData(data)
drag.exec_()
#---- File/Directory actions
def check_launch_error_codes(self, return_codes):
"""Check return codes and display message box if errors found."""
errors = [cmd for cmd, code in return_codes.items() if code != 0]
if errors:
if len(errors) == 1:
msg = _('The following command did not launch successfully:')
else:
msg = _('The following commands did not launch successfully:')
msg += '<br><br>' if len(errors) == 1 else '<br><br><ul>'
for error in errors:
if len(errors) == 1:
msg += '<code>{}</code>'.format(error)
else:
msg += '<li><code>{}</code></li>'.format(error)
msg += '' if len(errors) == 1 else '</ul>'
QMessageBox.warning(self, 'Application', msg, QMessageBox.Ok)
return not bool(errors)
@Slot()
def open(self, fnames=None):
"""Open files with the appropriate application"""
if fnames is None:
fnames = self.get_selected_filenames()
for fname in fnames:
if osp.isfile(fname) and encoding.is_text_file(fname):
self.parent_widget.sig_open_file.emit(fname)
else:
self.open_outside_spyder([fname])
@Slot()
def open_association(self, app_path):
"""Open files with given application executable path."""
if not (os.path.isdir(app_path) or os.path.isfile(app_path)):
return_codes = {app_path: 1}
app_path = None
else:
return_codes = {}
if app_path:
fnames = self.get_selected_filenames()
return_codes = programs.open_files_with_application(app_path,
fnames)
self.check_launch_error_codes(return_codes)
@Slot()
def open_external(self, fnames=None):
"""Open files with default application"""
if fnames is None:
fnames = self.get_selected_filenames()
for fname in fnames:
self.open_outside_spyder([fname])
def open_outside_spyder(self, fnames):
"""Open file outside Spyder with the appropriate application
If this does not work, opening unknown file in Spyder, as text file"""
for path in sorted(fnames):
path = file_uri(path)
ok = programs.start_file(path)
if not ok:
self.sig_edit.emit(path)
def open_interpreter(self, fnames):
"""Open interpreter"""
for path in sorted(fnames):
self.sig_open_interpreter.emit(path)
@Slot()
def run(self, fnames=None):
"""Run Python scripts"""
if fnames is None:
fnames = self.get_selected_filenames()
for fname in fnames:
self.sig_run.emit(fname)
def remove_tree(self, dirname):
"""Remove whole directory tree
Reimplemented in project explorer widget"""
while osp.exists(dirname):
try:
shutil.rmtree(dirname, onerror=misc.onerror)
except Exception as e:
# This handles a Windows problem with shutil.rmtree.
# See spyder-ide/spyder#8567.
if type(e).__name__ == "OSError":
error_path = to_text_string(e.filename)
shutil.rmtree(error_path, ignore_errors=True)
def delete_file(self, fname, multiple, yes_to_all):
"""Delete file"""
if multiple:
buttons = QMessageBox.Yes|QMessageBox.YesToAll| \
QMessageBox.No|QMessageBox.Cancel
else:
buttons = QMessageBox.Yes|QMessageBox.No
if yes_to_all is None:
answer = QMessageBox.warning(self, _("Delete"),
_("Do you really want "
"to delete <b>%s</b>?"
) % osp.basename(fname), buttons)
if answer == QMessageBox.No:
return yes_to_all
elif answer == QMessageBox.Cancel:
return False
elif answer == QMessageBox.YesToAll:
yes_to_all = True
try:
if osp.isfile(fname):
misc.remove_file(fname)
self.sig_removed.emit(fname)
else:
self.remove_tree(fname)
self.sig_removed_tree.emit(fname)
return yes_to_all
except EnvironmentError as error:
action_str = _('delete')
QMessageBox.critical(self, _("Project Explorer"),
_("<b>Unable to %s <i>%s</i></b>"
"<br><br>Error message:<br>%s"
) % (action_str, fname, to_text_string(error)))
return False
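    # The return value is the updated yes_to_all flag: None while the user is
    # still asked per file, True once "Yes to all" is chosen, and False after
    # "Cancel" or an error -- which the delete() loop below treats as a
    # request to stop.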
@Slot()
def delete(self, fnames=None):
"""Delete files"""
if fnames is None:
fnames = self.get_selected_filenames()
multiple = len(fnames) > 1
yes_to_all = None
for fname in fnames:
spyproject_path = osp.join(fname,'.spyproject')
if osp.isdir(fname) and osp.exists(spyproject_path):
QMessageBox.information(self, _('File Explorer'),
_("The current directory contains a "
"project.<br><br>"
"If you want to delete"
" the project, please go to "
"<b>Projects</b> » <b>Delete "
"Project</b>"))
else:
yes_to_all = self.delete_file(fname, multiple, yes_to_all)
if yes_to_all is not None and not yes_to_all:
# Canceled
break
def convert_notebook(self, fname):
"""Convert an IPython notebook to a Python script in editor"""
try:
script = nbexporter().from_filename(fname)[0]
except Exception as e:
QMessageBox.critical(self, _('Conversion error'),
_("It was not possible to convert this "
"notebook. The error is:\n\n") + \
to_text_string(e))
return
self.sig_new_file.emit(script)
@Slot()
def convert_notebooks(self):
"""Convert IPython notebooks to Python scripts in editor"""
fnames = self.get_selected_filenames()
if not isinstance(fnames, (tuple, list)):
fnames = [fnames]
for fname in fnames:
self.convert_notebook(fname)
def rename_file(self, fname):
"""Rename file"""
path, valid = QInputDialog.getText(self, _('Rename'),
_('New name:'), QLineEdit.Normal,
osp.basename(fname))
if valid:
path = osp.join(osp.dirname(fname), to_text_string(path))
if path == fname:
return
if osp.exists(path):
if QMessageBox.warning(self, _("Rename"),
_("Do you really want to rename <b>%s</b> and "
"overwrite the existing file <b>%s</b>?"
) % (osp.basename(fname), osp.basename(path)),
QMessageBox.Yes|QMessageBox.No) == QMessageBox.No:
return
try:
misc.rename_file(fname, path)
if osp.isfile(path):
self.sig_renamed.emit(fname, path)
else:
self.sig_renamed_tree.emit(fname, path)
return path
except EnvironmentError as error:
QMessageBox.critical(self, _("Rename"),
_("<b>Unable to rename file <i>%s</i></b>"
"<br><br>Error message:<br>%s"
) % (osp.basename(fname), to_text_string(error)))
@Slot()
def show_in_external_file_explorer(self, fnames=None):
"""Show file in external file explorer"""
if fnames is None:
fnames = self.get_selected_filenames()
show_in_external_file_explorer(fnames)
@Slot()
def rename(self, fnames=None):
"""Rename files"""
if fnames is None:
fnames = self.get_selected_filenames()
if not isinstance(fnames, (tuple, list)):
fnames = [fnames]
for fname in fnames:
self.rename_file(fname)
@Slot()
def move(self, fnames=None, directory=None):
"""Move files/directories"""
if fnames is None:
fnames = self.get_selected_filenames()
orig = fixpath(osp.dirname(fnames[0]))
while True:
self.redirect_stdio.emit(False)
if directory is None:
folder = getexistingdirectory(self, _("Select directory"),
orig)
else:
folder = directory
self.redirect_stdio.emit(True)
if folder:
folder = fixpath(folder)
if folder != orig:
break
else:
return
for fname in fnames:
basename = osp.basename(fname)
try:
misc.move_file(fname, osp.join(folder, basename))
except EnvironmentError as error:
QMessageBox.critical(self, _("Error"),
_("<b>Unable to move <i>%s</i></b>"
"<br><br>Error message:<br>%s"
) % (basename, to_text_string(error)))
def create_new_folder(self, current_path, title, subtitle, is_package):
"""Create new folder"""
if current_path is None:
current_path = ''
if osp.isfile(current_path):
current_path = osp.dirname(current_path)
name, valid = QInputDialog.getText(self, title, subtitle,
QLineEdit.Normal, "")
if valid:
dirname = osp.join(current_path, to_text_string(name))
try:
os.mkdir(dirname)
except EnvironmentError as error:
QMessageBox.critical(self, title,
_("<b>Unable "
"to create folder <i>%s</i></b>"
"<br><br>Error message:<br>%s"
) % (dirname, to_text_string(error)))
finally:
if is_package:
fname = osp.join(dirname, '__init__.py')
try:
with open(fname, 'wb') as f:
f.write(to_binary_string('#'))
return dirname
except EnvironmentError as error:
QMessageBox.critical(self, title,
_("<b>Unable "
"to create file <i>%s</i></b>"
"<br><br>Error message:<br>%s"
) % (fname,
to_text_string(error)))
def new_folder(self, basedir):
"""New folder"""
title = _('New folder')
subtitle = _('Folder name:')
self.create_new_folder(basedir, title, subtitle, is_package=False)
def new_package(self, basedir):
"""New package"""
title = _('New package')
subtitle = _('Package name:')
self.create_new_folder(basedir, title, subtitle, is_package=True)
def create_new_file(self, current_path, title, filters, create_func):
"""Create new file
Returns True if successful"""
if current_path is None:
current_path = ''
if osp.isfile(current_path):
current_path = osp.dirname(current_path)
self.redirect_stdio.emit(False)
fname, _selfilter = getsavefilename(self, title, current_path, filters)
self.redirect_stdio.emit(True)
if fname:
try:
create_func(fname)
return fname
except EnvironmentError as error:
QMessageBox.critical(self, _("New file"),
_("<b>Unable to create file <i>%s</i>"
"</b><br><br>Error message:<br>%s"
) % (fname, to_text_string(error)))
def new_file(self, basedir):
"""New file"""
title = _("New file")
filters = _("All files")+" (*)"
def create_func(fname):
"""File creation callback"""
if osp.splitext(fname)[1] in ('.py', '.pyw', '.ipy'):
create_script(fname)
else:
with open(fname, 'wb') as f:
f.write(to_binary_string(''))
fname = self.create_new_file(basedir, title, filters, create_func)
if fname is not None:
self.open([fname])
def new_module(self, basedir):
"""New module"""
title = _("New module")
filters = _("Python scripts")+" (*.py *.pyw *.ipy)"
def create_func(fname):
self.sig_create_module.emit(fname)
self.create_new_file(basedir, title, filters, create_func)
def go_to_parent_directory(self):
pass
def copy_path(self, fnames=None, method="absolute"):
"""Copy absolute or relative path to given file(s)/folders(s)."""
cb = QApplication.clipboard()
explorer_dir = self.fsmodel.rootPath()
if fnames is None:
fnames = self.get_selected_filenames()
if not isinstance(fnames, (tuple, list)):
fnames = [fnames]
fnames = [_fn.replace(os.sep, "/") for _fn in fnames]
if len(fnames) > 1:
if method == "absolute":
clipboard_files = ',\n'.join('"' + _fn + '"' for _fn in fnames)
elif method == "relative":
clipboard_files = ',\n'.join('"' +
osp.relpath(_fn, explorer_dir).
replace(os.sep, "/") + '"'
for _fn in fnames)
else:
if method == "absolute":
clipboard_files = fnames[0]
elif method == "relative":
clipboard_files = (osp.relpath(fnames[0], explorer_dir).
replace(os.sep, "/"))
copied_from = self.parent_widget.__class__.__name__
if copied_from == 'ProjectExplorerWidget' and method == 'relative':
clipboard_files = [path.strip(',"') for path in
clipboard_files.splitlines()]
clipboard_files = ['/'.join(path.strip('/').split('/')[1:]) for
path in clipboard_files]
if len(clipboard_files) > 1:
clipboard_files = ',\n'.join('"' + _fn + '"' for _fn in
clipboard_files)
else:
clipboard_files = clipboard_files[0]
cb.setText(clipboard_files, mode=cb.Clipboard)
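    # Clipboard formats produced above, with hypothetical paths: for two
    # selected files, method="absolute" yields
    #     "C:/project/a.py",
    #     "C:/project/b.py"
    # and method="relative" (rooted at C:/project) yields "a.py" and "b.py" in
    # the same quoted, comma-separated form; a single selection is copied
    # bare, without quotes or commas.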
@Slot()
def copy_absolute_path(self):
"""Copy absolute paths of named files/directories to the clipboard."""
self.copy_path(method="absolute")
@Slot()
def copy_relative_path(self):
"""Copy relative paths of named files/directories to the clipboard."""
self.copy_path(method="relative")
@Slot()
def copy_file_clipboard(self, fnames=None):
"""Copy file(s)/folders(s) to clipboard."""
if fnames is None:
fnames = self.get_selected_filenames()
if not isinstance(fnames, (tuple, list)):
fnames = [fnames]
try:
file_content = QMimeData()
file_content.setUrls([QUrl.fromLocalFile(_fn) for _fn in fnames])
cb = QApplication.clipboard()
cb.setMimeData(file_content, mode=cb.Clipboard)
except Exception as e:
QMessageBox.critical(self,
_('File/Folder copy error'),
_("Cannot copy this type of file(s) or "
"folder(s). The error was:\n\n")
+ to_text_string(e))
@Slot()
def save_file_clipboard(self, fnames=None):
"""Paste file from clipboard into file/project explorer directory."""
if fnames is None:
fnames = self.get_selected_filenames()
if not isinstance(fnames, (tuple, list)):
fnames = [fnames]
if len(fnames) >= 1:
try:
selected_item = osp.commonpath(fnames)
except AttributeError:
# py2 does not have commonpath
if len(fnames) > 1:
selected_item = osp.normpath(
osp.dirname(osp.commonprefix(fnames)))
else:
selected_item = fnames[0]
if osp.isfile(selected_item):
parent_path = osp.dirname(selected_item)
else:
parent_path = osp.normpath(selected_item)
cb_data = QApplication.clipboard().mimeData()
if cb_data.hasUrls():
urls = cb_data.urls()
for url in urls:
source_name = url.toLocalFile()
base_name = osp.basename(source_name)
if osp.isfile(source_name):
try:
while base_name in os.listdir(parent_path):
file_no_ext, file_ext = osp.splitext(base_name)
end_number = re.search(r'\d+$', file_no_ext)
if end_number:
new_number = int(end_number.group()) + 1
else:
new_number = 1
left_string = re.sub(r'\d+$', '', file_no_ext)
left_string += str(new_number)
base_name = left_string + file_ext
destination = osp.join(parent_path, base_name)
else:
destination = osp.join(parent_path, base_name)
shutil.copy(source_name, destination)
except Exception as e:
QMessageBox.critical(self, _('Error pasting file'),
_("Unsupported copy operation"
". The error was:\n\n")
+ to_text_string(e))
else:
try:
while base_name in os.listdir(parent_path):
end_number = re.search(r'\d+$', base_name)
if end_number:
new_number = int(end_number.group()) + 1
else:
new_number = 1
left_string = re.sub(r'\d+$', '', base_name)
base_name = left_string + str(new_number)
destination = osp.join(parent_path, base_name)
else:
destination = osp.join(parent_path, base_name)
if osp.realpath(destination).startswith(
osp.realpath(source_name) + os.sep):
QMessageBox.critical(self,
_('Recursive copy'),
_("Source is an ancestor"
" of destination"
" folder."))
continue
shutil.copytree(source_name, destination)
except Exception as e:
QMessageBox.critical(self,
_('Error pasting folder'),
_("Unsupported copy"
" operation. The error was:"
"\n\n") + to_text_string(e))
else:
QMessageBox.critical(self, _("No file in clipboard"),
_("No file in the clipboard. Please copy"
" a file to the clipboard first."))
else:
if QApplication.clipboard().mimeData().hasUrls():
QMessageBox.critical(self, _('Blank area'),
_("Cannot paste in the blank area."))
else:
pass
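    # Collision handling above, illustrated with a hypothetical name: pasting
    # notes.txt into a folder that already holds notes.txt produces
    # notes1.txt; if that exists too, the trailing number is bumped
    # (notes2.txt, ...) until a free name is found, for files and folders
    # alike.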
def create_shortcuts(self):
"""Create shortcuts for this file explorer."""
# Configurable
copy_clipboard_file = CONF.config_shortcut(
self.copy_file_clipboard,
context='explorer',
name='copy file',
parent=self)
paste_clipboard_file = CONF.config_shortcut(
self.save_file_clipboard,
context='explorer',
name='paste file',
parent=self)
copy_absolute_path = CONF.config_shortcut(
self.copy_absolute_path,
context='explorer',
name='copy absolute path',
parent=self)
copy_relative_path = CONF.config_shortcut(
self.copy_relative_path,
context='explorer',
name='copy relative path',
parent=self)
return [copy_clipboard_file, paste_clipboard_file, copy_absolute_path,
copy_relative_path]
def get_shortcut_data(self):
"""
Return shortcut data, a list of tuples (shortcut, text, default).
shortcut (QShortcut or QAction instance)
text (string): action/shortcut description
default (string): default key sequence
"""
return [sc.data for sc in self.shortcuts]
#----- VCS actions
def vcs_command(self, fnames, action):
"""VCS action (commit, browse)"""
try:
for path in sorted(fnames):
vcs.run_vcs_tool(path, action)
except vcs.ActionToolNotFound as error:
msg = _("For %s support, please install one of the<br/> "
"following tools:<br/><br/> %s")\
% (error.vcsname, ', '.join(error.tools))
QMessageBox.critical(self, _("Error"),
_("""<b>Unable to find external program.</b><br><br>%s""")
% to_text_string(msg))
#----- Settings
def get_scrollbar_position(self):
"""Return scrollbar positions"""
return (self.horizontalScrollBar().value(),
self.verticalScrollBar().value())
def set_scrollbar_position(self, position):
"""Set scrollbar positions"""
# Scrollbars will be restored after the expanded state
self._scrollbar_positions = position
if self._to_be_loaded is not None and len(self._to_be_loaded) == 0:
self.restore_scrollbar_positions()
def restore_scrollbar_positions(self):
"""Restore scrollbar positions once tree is loaded"""
hor, ver = self._scrollbar_positions
self.horizontalScrollBar().setValue(hor)
self.verticalScrollBar().setValue(ver)
def get_expanded_state(self):
"""Return expanded state"""
self.save_expanded_state()
return self.__expanded_state
def set_expanded_state(self, state):
"""Set expanded state"""
self.__expanded_state = state
self.restore_expanded_state()
def save_expanded_state(self):
"""Save all items expanded state"""
model = self.model()
# If model is not installed, 'model' will be None: this happens when
# using the Project Explorer without having selected a workspace yet
if model is not None:
self.__expanded_state = []
for idx in model.persistentIndexList():
if self.isExpanded(idx):
self.__expanded_state.append(self.get_filename(idx))
def restore_directory_state(self, fname):
"""Restore directory expanded state"""
root = osp.normpath(to_text_string(fname))
if not osp.exists(root):
# Directory has been (re)moved outside Spyder
return
for basename in os.listdir(root):
path = osp.normpath(osp.join(root, basename))
if osp.isdir(path) and path in self.__expanded_state:
self.__expanded_state.pop(self.__expanded_state.index(path))
if self._to_be_loaded is None:
self._to_be_loaded = []
self._to_be_loaded.append(path)
self.setExpanded(self.get_index(path), True)
if not self.__expanded_state:
self.fsmodel.directoryLoaded.disconnect(self.restore_directory_state)
def follow_directories_loaded(self, fname):
"""Follow directories loaded during startup"""
if self._to_be_loaded is None:
return
path = osp.normpath(to_text_string(fname))
if path in self._to_be_loaded:
self._to_be_loaded.remove(path)
if self._to_be_loaded is not None and len(self._to_be_loaded) == 0:
self.fsmodel.directoryLoaded.disconnect(
self.follow_directories_loaded)
if self._scrollbar_positions is not None:
                # The tree view needs some time to render its branches:
QTimer.singleShot(50, self.restore_scrollbar_positions)
def restore_expanded_state(self):
"""Restore all items expanded state"""
if self.__expanded_state is not None:
            # In the old project explorer, the expanded state was a dictionary:
if isinstance(self.__expanded_state, list):
self.fsmodel.directoryLoaded.connect(
self.restore_directory_state)
self.fsmodel.directoryLoaded.connect(
self.follow_directories_loaded)
def filter_directories(self):
"""Filter the directories to show"""
index = self.get_index('.spyproject')
if index is not None:
self.setRowHidden(index.row(), index.parent(), True)
class ProxyModel(QSortFilterProxyModel):
"""Proxy model: filters tree view"""
def __init__(self, parent):
super(ProxyModel, self).__init__(parent)
self.root_path = None
self.path_list = []
self.setDynamicSortFilter(True)
def setup_filter(self, root_path, path_list):
"""Setup proxy model filter parameters"""
self.root_path = osp.normpath(to_text_string(root_path))
self.path_list = [osp.normpath(to_text_string(p)) for p in path_list]
self.invalidateFilter()
def sort(self, column, order=Qt.AscendingOrder):
"""Reimplement Qt method"""
self.sourceModel().sort(column, order)
def filterAcceptsRow(self, row, parent_index):
"""Reimplement Qt method"""
if self.root_path is None:
return True
index = self.sourceModel().index(row, 0, parent_index)
path = osp.normcase(osp.normpath(
to_text_string(self.sourceModel().filePath(index))))
if osp.normcase(self.root_path).startswith(path):
# This is necessary because parent folders need to be scanned
return True
else:
for p in [osp.normcase(p) for p in self.path_list]:
if path == p or path.startswith(p+os.sep):
return True
else:
return False
def data(self, index, role):
"""Show tooltip with full path only for the root directory"""
if role == Qt.ToolTipRole:
root_dir = self.path_list[0].split(osp.sep)[-1]
if index.data() == root_dir:
return osp.join(self.root_path, root_dir)
return QSortFilterProxyModel.data(self, index, role)
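# Filter semantics illustrated with assumed paths: with
# root_path == '/home/user/project' and path_list == ['/home/user/project/src'],
# filterAcceptsRow() accepts '/home' and '/home/user' (ancestors of the root
# must stay visible so the tree can be scanned), accepts
# '/home/user/project/src' and everything below it, and rejects siblings such
# as '/home/user/project/docs'.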
class FilteredDirView(DirView):
"""Filtered file/directory tree view"""
def __init__(self, parent=None):
super(FilteredDirView, self).__init__(parent)
self.proxymodel = None
self.setup_proxy_model()
self.root_path = None
#---- Model
def setup_proxy_model(self):
"""Setup proxy model"""
self.proxymodel = ProxyModel(self)
self.proxymodel.setSourceModel(self.fsmodel)
def install_model(self):
"""Install proxy model"""
if self.root_path is not None:
self.setModel(self.proxymodel)
def set_root_path(self, root_path):
"""Set root path"""
self.root_path = root_path
self.install_model()
index = self.fsmodel.setRootPath(root_path)
self.proxymodel.setup_filter(self.root_path, [])
self.setRootIndex(self.proxymodel.mapFromSource(index))
def get_index(self, filename):
"""Return index associated with filename"""
index = self.fsmodel.index(filename)
if index.isValid() and index.model() is self.fsmodel:
return self.proxymodel.mapFromSource(index)
def set_folder_names(self, folder_names):
"""Set folder names"""
assert self.root_path is not None
path_list = [osp.join(self.root_path, dirname)
for dirname in folder_names]
self.proxymodel.setup_filter(self.root_path, path_list)
def get_filename(self, index):
"""Return filename from index"""
if index:
path = self.fsmodel.filePath(self.proxymodel.mapToSource(index))
return osp.normpath(to_text_string(path))
def setup_project_view(self):
"""Setup view for projects"""
for i in [1, 2, 3]:
self.hideColumn(i)
self.setHeaderHidden(True)
# Disable the view of .spyproject.
self.filter_directories()
class ExplorerTreeWidget(DirView):
"""File/directory explorer tree widget
show_cd_only: Show current directory only
(True/False: enable/disable the option
None: enable the option and do not allow the user to disable it)"""
set_previous_enabled = Signal(bool)
set_next_enabled = Signal(bool)
sig_dir_opened = Signal(str)
"""
This signal is emitted when the current directory of the explorer tree
has changed.
Parameters
----------
new_root_directory: str
The new root directory path.
Notes
-----
This happens when clicking (or double clicking depending on the option)
a folder, turning this folder in the new root parent of the tree.
"""
def __init__(self, parent=None, show_cd_only=None):
DirView.__init__(self, parent)
self.history = []
self.histindex = None
self.show_cd_only = show_cd_only
self.__original_root_index = None
self.__last_folder = None
self.menu = None
self.common_actions = None
# Enable drag events
self.setDragEnabled(True)
#---- Context menu
def setup_common_actions(self):
"""Setup context menu common actions"""
actions = super(ExplorerTreeWidget, self).setup_common_actions()
if self.show_cd_only is None:
# Enabling the 'show current directory only' option but do not
# allow the user to disable it
self.show_cd_only = True
else:
# Show current directory only
cd_only_action = create_action(self,
_("Show current directory only"),
toggled=self.toggle_show_cd_only)
cd_only_action.setChecked(self.show_cd_only)
self.toggle_show_cd_only(self.show_cd_only)
actions.append(cd_only_action)
return actions
@Slot(bool)
def toggle_show_cd_only(self, checked):
"""Toggle show current directory only mode"""
self.parent_widget.sig_option_changed.emit('show_cd_only', checked)
self.show_cd_only = checked
if checked:
if self.__last_folder is not None:
self.set_current_folder(self.__last_folder)
elif self.__original_root_index is not None:
self.setRootIndex(self.__original_root_index)
#---- Refreshing widget
def set_current_folder(self, folder):
"""Set current folder and return associated model index"""
index = self.fsmodel.setRootPath(folder)
self.__last_folder = folder
if self.show_cd_only:
if self.__original_root_index is None:
self.__original_root_index = self.rootIndex()
self.setRootIndex(index)
return index
def get_current_folder(self):
return self.__last_folder
def refresh(self, new_path=None, force_current=False):
"""Refresh widget
force=False: won't refresh widget if path has not changed"""
if new_path is None:
new_path = getcwd_or_home()
if force_current:
index = self.set_current_folder(new_path)
self.expand(index)
self.setCurrentIndex(index)
self.set_previous_enabled.emit(
self.histindex is not None and self.histindex > 0)
self.set_next_enabled.emit(self.histindex is not None and \
self.histindex < len(self.history)-1)
# Disable the view of .spyproject.
self.filter_directories()
#---- Events
def directory_clicked(self, dirname):
"""Directory was just clicked"""
self.chdir(directory=dirname)
#---- Files/Directories Actions
@Slot()
def go_to_parent_directory(self):
"""Go to parent directory"""
self.chdir(osp.abspath(osp.join(getcwd_or_home(), os.pardir)))
@Slot()
def go_to_previous_directory(self):
"""Back to previous directory"""
self.histindex -= 1
self.chdir(browsing_history=True)
@Slot()
def go_to_next_directory(self):
"""Return to next directory"""
self.histindex += 1
self.chdir(browsing_history=True)
def update_history(self, directory):
"""Update browse history"""
try:
directory = osp.abspath(to_text_string(directory))
if directory in self.history:
self.histindex = self.history.index(directory)
except Exception:
user_directory = get_home_dir()
self.chdir(directory=user_directory, browsing_history=True)
def chdir(self, directory=None, browsing_history=False, emit=True):
"""
Set directory as working directory.
Parameters
----------
directory: str
The new working directory.
        browsing_history: bool, optional
            Add the new `directory` to the browsing history. Default is False.
        emit: bool, optional
            Emit a signal when changing the working directory.
            Default is True.
"""
if directory is not None:
directory = osp.abspath(to_text_string(directory))
if browsing_history:
directory = self.history[self.histindex]
elif directory in self.history:
self.histindex = self.history.index(directory)
else:
if self.histindex is None:
self.history = []
else:
self.history = self.history[:self.histindex+1]
if len(self.history) == 0 or \
(self.history and self.history[-1] != directory):
self.history.append(directory)
self.histindex = len(self.history)-1
directory = to_text_string(directory)
try:
PermissionError
FileNotFoundError
except NameError:
PermissionError = OSError
if os.name == 'nt':
FileNotFoundError = WindowsError
else:
FileNotFoundError = IOError
try:
os.chdir(directory)
self.refresh(new_path=directory, force_current=True)
if emit:
self.sig_dir_opened.emit(directory)
except PermissionError:
QMessageBox.critical(self.parent_widget, "Error",
_("You don't have the right permissions to "
"open this directory"))
except FileNotFoundError:
# Handle renaming directories on the fly.
# See spyder-ide/spyder#5183
self.history.pop(self.histindex)
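# chdir() history behavior in a nutshell (hypothetical session): visiting /a,
# /b, /c gives history == ['/a', '/b', '/c'] and histindex == 2;
# go_to_previous_directory() re-enters /b via browsing_history=True without
# growing the list, and a fresh chdir('/d') from /b truncates the forward
# entries, leaving ['/a', '/b', '/d'].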
class ExplorerWidget(QWidget):
"""Explorer widget"""
sig_option_changed = Signal(str, object)
sig_open_file = Signal(str)
open_dir = Signal(str)
    def __init__(self, parent=None, name_filters=None,
                 show_all=False, show_cd_only=None, show_icontext=True,
                 single_click_to_open=False, file_associations=None,
                 options_button=None, visible_columns=None):
        QWidget.__init__(self, parent)
        # None defaults avoid sharing mutable default objects between instances.
        name_filters = name_filters if name_filters is not None else ['*.py', '*.pyw']
        file_associations = file_associations if file_associations is not None else {}
        visible_columns = visible_columns if visible_columns is not None else [0, 3]
# Widgets
self.treewidget = ExplorerTreeWidget(self, show_cd_only=show_cd_only)
button_previous = QToolButton(self)
button_next = QToolButton(self)
button_parent = QToolButton(self)
self.button_menu = options_button or QToolButton(self)
self.action_widgets = [button_previous, button_next, button_parent,
self.button_menu]
# Actions
icontext_action = create_action(self, _("Show icons and text"),
toggled=self.toggle_icontext)
previous_action = create_action(self, text=_("Previous"),
icon=ima.icon('ArrowBack'),
triggered=self.treewidget.go_to_previous_directory)
next_action = create_action(self, text=_("Next"),
icon=ima.icon('ArrowForward'),
triggered=self.treewidget.go_to_next_directory)
parent_action = create_action(self, text=_("Parent"),
icon=ima.icon('ArrowUp'),
triggered=self.treewidget.go_to_parent_directory)
# Setup widgets
self.treewidget.setup(
name_filters=name_filters,
show_all=show_all,
single_click_to_open=single_click_to_open,
file_associations=file_associations,
)
# Setup of actions
ismac = sys.platform == 'darwin'
kind = _('Display kind') if ismac else _('Display type')
self.display_column_actions = []
for idx, text in enumerate([_('Display size'), kind,
_('Display date modified')]):
col = idx + 1
action = create_action(
self, text=text,
toggled=lambda x, c=col: self.treewidget.toggle_column(c))
if col in visible_columns:
self.treewidget.showColumn(col)
else:
self.treewidget.hideColumn(col)
action.blockSignals(True)
action.setChecked(not self.treewidget.isColumnHidden(col))
action.blockSignals(False)
self.display_column_actions.append(action)
self.treewidget.chdir(getcwd_or_home())
self.treewidget.common_actions += [None, icontext_action, None]
self.treewidget.common_actions += self.display_column_actions
button_previous.setDefaultAction(previous_action)
previous_action.setEnabled(False)
button_next.setDefaultAction(next_action)
next_action.setEnabled(False)
button_parent.setDefaultAction(parent_action)
self.toggle_icontext(show_icontext)
icontext_action.setChecked(show_icontext)
for widget in self.action_widgets:
widget.setAutoRaise(True)
widget.setIconSize(QSize(16, 16))
# Layouts
blayout = QHBoxLayout()
blayout.addWidget(button_previous)
blayout.addWidget(button_next)
blayout.addWidget(button_parent)
blayout.addStretch()
blayout.addWidget(self.button_menu)
layout = create_plugin_layout(blayout, self.treewidget)
self.setLayout(layout)
# Signals and slots
self.treewidget.set_previous_enabled.connect(
previous_action.setEnabled)
self.treewidget.set_next_enabled.connect(next_action.setEnabled)
self.sig_option_changed.connect(self.refresh_actions)
def refresh_actions(self, option, value):
"""Refresh column visibility actions."""
if option == 'visible_columns':
for col in range(1, 4):
is_hidden = self.treewidget.isColumnHidden(col)
action = self.display_column_actions[col - 1]
action.blockSignals(True)
action.setChecked(not is_hidden)
action.blockSignals(False)
@Slot(bool)
def toggle_icontext(self, state):
"""Toggle icon text"""
self.sig_option_changed.emit('show_icontext', state)
for widget in self.action_widgets:
if widget is not self.button_menu:
if state:
widget.setToolButtonStyle(Qt.ToolButtonTextBesideIcon)
else:
widget.setToolButtonStyle(Qt.ToolButtonIconOnly)
#==============================================================================
# Tests
#==============================================================================
class FileExplorerTest(QWidget):
    def __init__(self, directory=None, file_associations=None):
        super(FileExplorerTest, self).__init__()
        file_associations = file_associations if file_associations is not None else {}
if directory is not None:
self.directory = directory
else:
self.directory = osp.dirname(osp.abspath(__file__))
self.explorer = ExplorerWidget(self, show_cd_only=None,
file_associations=file_associations)
self.label_dir = QLabel("<b>Open dir:</b>")
self.label_file = QLabel("<b>Open file:</b>")
self.label1 = QLabel()
self.label_dir.setAlignment(Qt.AlignRight)
self.label2 = QLabel()
self.label_option = QLabel("<b>Option changed:</b>")
self.label3 = QLabel()
# Setup
self.explorer.treewidget.set_current_folder(self.directory)
self.label_file.setAlignment(Qt.AlignRight)
self.label_option.setAlignment(Qt.AlignRight)
# Layout
hlayout1 = QHBoxLayout()
hlayout1.addWidget(self.label_file)
hlayout1.addWidget(self.label1)
hlayout2 = QHBoxLayout()
hlayout2.addWidget(self.label_dir)
hlayout2.addWidget(self.label2)
hlayout3 = QHBoxLayout()
hlayout3.addWidget(self.label_option)
hlayout3.addWidget(self.label3)
vlayout = QVBoxLayout()
vlayout.addWidget(self.explorer)
vlayout.addLayout(hlayout1)
vlayout.addLayout(hlayout2)
vlayout.addLayout(hlayout3)
self.setLayout(vlayout)
# Signals
self.explorer.open_dir.connect(self.label2.setText)
self.explorer.open_dir.connect(
lambda: self.explorer.treewidget.refresh('..'))
self.explorer.sig_open_file.connect(self.label1.setText)
self.explorer.sig_option_changed.connect(
lambda x, y: self.label3.setText('option_changed: %r, %r' % (x, y)))
class ProjectExplorerTest(QWidget):
def __init__(self, parent=None):
QWidget.__init__(self, parent)
vlayout = QVBoxLayout()
self.setLayout(vlayout)
self.treewidget = FilteredDirView(self)
self.treewidget.setup_view()
self.treewidget.set_root_path(osp.dirname(osp.abspath(__file__)))
self.treewidget.set_folder_names(['variableexplorer'])
self.treewidget.setup_project_view()
vlayout.addWidget(self.treewidget)
def test(file_explorer):
from spyder.utils.qthelpers import qapplication
app = qapplication()
if file_explorer:
test = FileExplorerTest()
else:
test = ProjectExplorerTest()
test.resize(640, 480)
test.show()
app.exec_()
if __name__ == "__main__":
test(file_explorer=True)
test(file_explorer=False)
| avg_line_length: 39.98642 | max_line_length: 82 | alphanum_fraction: 0.545704 |

hexsha: 63d4fc3ec536d536b165d58ea27339117133136b | size: 9,367 | ext: py | lang: Python
path: satchmo/apps/payment/modules/paypal/views.py | repo: predatell/satchmo | head: 6ced1f845aadec240c7e433c3cbf4caca96e0d92 | licenses: ["BSD-3-Clause"] | stars/issues/forks: null
from decimal import Decimal
from django.conf import settings
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.utils.http import urlencode
from django.utils.translation import ugettext as _
from django.views.decorators.cache import never_cache
try:
from django.core.urlresolvers import NoReverseMatch
except ImportError:
from django.urls import NoReverseMatch
from livesettings.functions import config_get_group, config_value
from payment.config import gateway_live
from payment.utils import get_processor_by_key
from payment.views import payship
from satchmo_store.shop.models import Cart
from satchmo_store.shop.models import Order, OrderPayment
from satchmo_store.contact.models import Contact
from satchmo_utils.dynamic import lookup_url, lookup_template
from satchmo_utils.views import bad_or_missing
from six.moves import urllib
from sys import exc_info
from traceback import format_exception
import logging
from django.views.decorators.csrf import csrf_exempt
log = logging.getLogger()
def pay_ship_info(request):
return payship.base_pay_ship_info(request,
config_get_group('PAYMENT_PAYPAL'), payship.simple_pay_ship_process_form,
'shop/checkout/paypal/pay_ship.html')
pay_ship_info = never_cache(pay_ship_info)
def confirm_info(request):
payment_module = config_get_group('PAYMENT_PAYPAL')
try:
order = Order.objects.from_request(request)
except Order.DoesNotExist:
url = lookup_url(payment_module, 'satchmo_checkout-step1')
return HttpResponseRedirect(url)
tempCart = Cart.objects.from_request(request)
if tempCart.numItems == 0 and not order.is_partially_paid:
template = lookup_template(payment_module, 'shop/checkout/empty_cart.html')
return render(request, template)
# Check if the order is still valid
if not order.validate(request):
return render(request, 'shop/404.html', {'message': _('Your order is no longer valid.')})
template = lookup_template(payment_module, 'shop/checkout/paypal/confirm.html')
if payment_module.LIVE.value:
log.debug("live order on %s", payment_module.KEY.value)
url = payment_module.POST_URL.value
account = payment_module.BUSINESS.value
else:
url = payment_module.POST_TEST_URL.value
account = payment_module.BUSINESS_TEST.value
try:
address = lookup_url(payment_module,
payment_module.RETURN_ADDRESS.value, include_server=True)
except NoReverseMatch:
address = payment_module.RETURN_ADDRESS.value
    try:
        cart = Cart.objects.from_request(request)
    except Exception:
        cart = None
    try:
        contact = Contact.objects.from_request(request)
    except Exception:
        contact = None
if cart and contact:
cart.customer = contact
log.debug(':::Updating Cart %s for %s' % (cart, contact))
cart.save()
processor_module = payment_module.MODULE.load_module('processor')
processor = processor_module.PaymentProcessor(payment_module)
processor.create_pending_payment(order=order)
default_view_tax = config_value('TAX', 'DEFAULT_VIEW_TAX')
recurring = None
# Run only if subscription products are installed
if 'product.modules.subscription' in settings.INSTALLED_APPS:
order_items = order.orderitem_set.all()
for item in order_items:
if not item.product.is_subscription:
continue
if item.product.has_variants:
price = item.product.productvariation.get_qty_price(item.quantity, True)
else:
price = item.product.get_qty_price(item.quantity, True)
recurring = {'product':item.product, 'price':price.quantize(Decimal('.01'))}
trial0 = recurring['product'].subscriptionproduct.get_trial_terms(0)
if len(order_items) > 1 or trial0 is not None or recurring['price'] < order.balance:
recurring['trial1'] = {'price': order.balance,}
if trial0 is not None:
recurring['trial1']['expire_length'] = trial0.expire_length
recurring['trial1']['expire_unit'] = trial0.subscription.expire_unit[0]
# else:
# recurring['trial1']['expire_length'] = recurring['product'].subscriptionproduct.get_trial_terms(0).expire_length
trial1 = recurring['product'].subscriptionproduct.get_trial_terms(1)
                if trial1 is not None:
                    # 'trial2' must be initialized before its entries are
                    # filled in; indexing a missing key raises a KeyError.
                    recurring['trial2'] = {'expire_length': trial1.expire_length,
                                           'expire_unit': trial1.subscription.expire_unit[0],
                                           'price': trial1.price}
ctx = {'order': order,
'post_url': url,
'default_view_tax': default_view_tax,
'business': account,
'currency_code': payment_module.CURRENCY_CODE.value,
'return_address': address,
'invoice': order.id,
'subscription': recurring,
'PAYMENT_LIVE' : gateway_live(payment_module)
}
return render(request, template, ctx)
confirm_info = never_cache(confirm_info)
@csrf_exempt
def ipn(request):
"""PayPal IPN (Instant Payment Notification)
Cornfirms that payment has been completed and marks invoice as paid.
Adapted from IPN cgi script provided at http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/456361"""
payment_module = config_get_group('PAYMENT_PAYPAL')
if payment_module.LIVE.value:
log.debug("Live IPN on %s", payment_module.KEY.value)
url = payment_module.POST_URL.value
account = payment_module.BUSINESS.value
else:
log.debug("Test IPN on %s", payment_module.KEY.value)
url = payment_module.POST_TEST_URL.value
account = payment_module.BUSINESS_TEST.value
PP_URL = url
try:
data = request.POST
log.debug("PayPal IPN data: " + repr(data))
        if not confirm_ipn_data(request.body.decode('utf-8'), PP_URL):
return HttpResponse()
        if 'payment_status' not in data or data['payment_status'] != "Completed":
# We want to respond to anything that isn't a payment - but we won't insert into our database.
log.info("Ignoring IPN data for non-completed payment.")
return HttpResponse()
try:
invoice = data['invoice']
        except KeyError:
invoice = data['item_number']
gross = data['mc_gross']
txn_id = data['txn_id']
if not OrderPayment.objects.filter(transaction_id=txn_id).count():
# If the payment hasn't already been processed:
order = Order.objects.get(pk=invoice)
order.add_status(status='New', notes=_("Paid through PayPal."))
processor = get_processor_by_key('PAYMENT_PAYPAL')
payment = processor.record_payment(order=order, amount=gross, transaction_id=txn_id)
if 'memo' in data:
if order.notes:
notes = order.notes + "\n"
else:
notes = ""
order.notes = notes + _('---Comment via Paypal IPN---') + '\n' + data['memo']
order.save()
log.debug("Saved order notes from Paypal")
# Run only if subscription products are installed
if 'product.modules.subscription' in settings.INSTALLED_APPS:
for item in order.orderitem_set.filter(product__subscriptionproduct__recurring=True, completed=False):
item.completed = True
item.save()
# We no longer empty the cart here. We do it on checkout success.
    except Exception:
log.exception(''.join(format_exception(*exc_info())))
return HttpResponse()
def confirm_ipn_data(query_string, PP_URL):
    # query_string is the raw form data that was submitted to the IPN URL.
    params = 'cmd=_notify-validate&' + query_string
    req = urllib.request.Request(PP_URL)
    req.add_header("Content-type", "application/x-www-form-urlencoded")
    # urlopen requires bytes for POST data, and the response body is bytes as well.
    fo = urllib.request.urlopen(req, params.encode('utf-8'))
    ret = fo.read().decode('utf-8')
if ret == "VERIFIED":
log.info("PayPal IPN data verification was successful.")
else:
log.info("PayPal IPN data verification failed.")
log.debug("HTTP code %s, response text: '%s'" % (fo.code, ret))
return False
return True
def success(request):
"""
    The order has been successfully processed.
We clear out the cart but let the payment processing get called by IPN
"""
try:
order = Order.objects.from_request(request)
except Order.DoesNotExist:
return bad_or_missing(request, _('Your order has already been processed.'))
# Added to track total sold for each product
for item in order.orderitem_set.all():
product = item.product
product.total_sold += item.quantity
if config_value('PRODUCT','TRACK_INVENTORY'):
product.items_in_stock -= item.quantity
product.save()
# Clean up cart now, the rest of the order will be cleaned on paypal IPN
for cart in Cart.objects.filter(customer=order.contact):
cart.empty()
del request.session['orderID']
return render(request, 'shop/checkout/success.html', {'order': order})
success = never_cache(success)
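The views above still need URL routes before Django can dispatch to them. A minimal sketch of a urls.py for this payment module, assuming Satchmo's usual naming conventions; the module path and URL names here are illustrative, not taken from this file:
from django.urls import path

from payment.modules.paypal import views  # hypothetical module path

urlpatterns = [
    path('', views.pay_ship_info, name='PAYPAL_satchmo_checkout-step2'),
    path('confirm/', views.confirm_info, name='PAYPAL_satchmo_checkout-step3'),
    path('ipn/', views.ipn, name='PAYPAL_satchmo_checkout-ipn'),
    path('success/', views.success, name='PAYPAL_satchmo_checkout-success'),
]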
| 38.706612
| 134
| 0.672254
|
6f245f652b4b1cd0f1a0cf6aa156f71485037a46
| 1,329
|
py
|
Python
|
Image_To_HTML.py
|
iamkazi/OCR-Tesseract
|
33a77cb4201725aac12c6fedb7a6d3ec26bd36dc
|
[
"Apache-2.0"
] | null | null | null |
Image_To_HTML.py
|
iamkazi/OCR-Tesseract
|
33a77cb4201725aac12c6fedb7a6d3ec26bd36dc
|
[
"Apache-2.0"
] | null | null | null |
Image_To_HTML.py
|
iamkazi/OCR-Tesseract
|
33a77cb4201725aac12c6fedb7a6d3ec26bd36dc
|
[
"Apache-2.0"
] | null | null | null |
import pytesseract
from PIL import Image
import os
#download tesseract from https://github.com/UB-Mannheim/tesseract/wiki
pytesseract.pytesseract.tesseract_cmd = r'C:\Users\iam_kazi\AppData\Local\Tesseract-OCR\tesseract.exe'
listfiles = os.listdir(r"C:\Data")
for x in listfiles:
    img = Image.open(os.path.join(r"C:\Data", x))
    img = img.convert('L')  # convert to grayscale before running OCR
text=pytesseract.image_to_string(img)
li = text.split('\n')
    for y in range(len(li)):
        # pytesseract lines may carry stray newlines, form feeds, quotes and
        # surrounding whitespace; strip them all (the original elif chain was
        # always true because of its `or " "` condition).
        li[y] = li[y].strip("\n\x0c'").strip()
li = list(filter(('').__ne__,li))
with open (x[:-4]+'.html','w') as file:
file.write("""<!DOCTYPE html>
<html>
<head>
<title></title>
</head>
<body>
""")
if(li):
for y in li:
file.write(y+"<br>")
file.write("""</body>
</html>""")
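To sanity-check the OCR step on a single file before running the whole batch, a minimal sketch (the image path is illustrative):
import pytesseract
from PIL import Image

# Hypothetical single-image run: grayscale, OCR, keep non-empty lines.
img = Image.open(r"C:\Data\sample.png").convert('L')
lines = [ln.strip() for ln in pytesseract.image_to_string(img).split('\n')]
print([ln for ln in lines if ln])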
| 32.414634
| 103
| 0.407825
|
21ff3925bc009fa27056f017a2bdb5538ae5ad65
| 1,038
|
py
|
Python
|
data/score_informed_transcription/midi/example_transpose_octave.py
|
bzcheeseman/Dali
|
a77c7ce60b20ce150a5927747e128688657907eb
|
[
"MIT"
] | null | null | null |
data/score_informed_transcription/midi/example_transpose_octave.py
|
bzcheeseman/Dali
|
a77c7ce60b20ce150a5927747e128688657907eb
|
[
"MIT"
] | null | null | null |
data/score_informed_transcription/midi/example_transpose_octave.py
|
bzcheeseman/Dali
|
a77c7ce60b20ce150a5927747e128688657907eb
|
[
"MIT"
] | null | null | null |
from .MidiOutFile import MidiOutFile
from .MidiInFile import MidiInFile
"""
This is an example of the smallest possible type 0 midi file, where
all the midi events are in the same track.
"""
class Transposer(MidiOutFile):
"Transposes all notes by 1 octave"
def _transp(self, ch, note):
if ch != 9: # not the drums!
note += 12
if note > 127:
note = 127
return note
def note_on(self, channel=0, note=0x40, velocity=0x40):
note = self._transp(channel, note)
MidiOutFile.note_on(self, channel, note, velocity)
def note_off(self, channel=0, note=0x40, velocity=0x40):
note = self._transp(channel, note)
MidiOutFile.note_off(self, channel, note, velocity)
out_file = 'midiout/transposed.mid'
midi_out = Transposer(out_file)
#in_file = 'midiout/minimal_type0.mid'
#in_file = 'test/midifiles/Lola.mid'
in_file = 'test/midifiles/tennessee_waltz.mid'
midi_in = MidiInFile(midi_out, in_file)
midi_in.read()
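The same subclass-and-override pattern handles any per-note transform; a sketch of a downward transposer reusing the classes above (the output path is illustrative):
class TransposerDown(Transposer):
    "Transposes all notes down 1 octave, clamping at 0"
    def _transp(self, ch, note):
        if ch != 9:  # leave the drum channel alone
            note = max(0, note - 12)
        return note

# midi_in = MidiInFile(TransposerDown('midiout/transposed_down.mid'), in_file)
# midi_in.read()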
| 25.317073
| 68
| 0.657033
|
42d9fc68b5afb7e7bf74e23d0ccf18ca38e99fa5
| 13,861
|
py
|
Python
|
siiptool/thirdparty/edk2_capsule_tool/Common/Uefi/Capsule/FmpCapsuleHeader.py
|
kdbarnes-intel/iotg-fbu
|
bf6c38240ef13c64d2776e48ec277b3c8bebce6f
|
[
"BSD-2-Clause"
] | 15
|
2020-03-21T03:19:46.000Z
|
2022-03-02T07:12:57.000Z
|
siiptool/thirdparty/edk2_capsule_tool/Common/Uefi/Capsule/FmpCapsuleHeader.py
|
kdbarnes-intel/iotg-fbu
|
bf6c38240ef13c64d2776e48ec277b3c8bebce6f
|
[
"BSD-2-Clause"
] | 7
|
2020-03-10T03:17:24.000Z
|
2021-06-08T17:47:34.000Z
|
siiptool/thirdparty/edk2_capsule_tool/Common/Uefi/Capsule/FmpCapsuleHeader.py
|
kdbarnes-intel/iotg-fbu
|
bf6c38240ef13c64d2776e48ec277b3c8bebce6f
|
[
"BSD-2-Clause"
] | 11
|
2020-02-28T19:52:22.000Z
|
2022-03-02T07:15:51.000Z
|
## @file
# Module that encodes and decodes a EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER with
# a payload.
#
# Copyright (c) 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
'''
FmpCapsuleHeader
'''
import struct
import uuid
class FmpCapsuleImageHeaderClass (object):
# typedef struct {
# UINT32 Version;
#
# ///
# /// Used to identify device firmware targeted by this update. This guid is matched by
# /// system firmware against ImageTypeId field within a EFI_FIRMWARE_IMAGE_DESCRIPTOR
# ///
# EFI_GUID UpdateImageTypeId;
#
# ///
# /// Passed as ImageIndex in call to EFI_FIRMWARE_MANAGEMENT_PROTOCOL.SetImage ()
# ///
# UINT8 UpdateImageIndex;
# UINT8 reserved_bytes[3];
#
# ///
# /// Size of the binary update image which immediately follows this structure
# ///
# UINT32 UpdateImageSize;
#
# ///
# /// Size of the VendorCode bytes which optionally immediately follow binary update image in the capsule
# ///
# UINT32 UpdateVendorCodeSize;
#
# ///
# /// The HardwareInstance to target with this update. If value is zero it means match all
# /// HardwareInstances. This field allows update software to target only a single device in
# /// cases where there are more than one device with the same ImageTypeId GUID.
# /// This header is outside the signed data of the Authentication Info structure and
# /// therefore can be modified without changing the Auth data.
# ///
# UINT64 UpdateHardwareInstance;
# } EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER;
#
# #define EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER_INIT_VERSION 0x00000002
_StructFormat = '<I16sB3BIIQ'
_StructSize = struct.calcsize (_StructFormat)
EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER_INIT_VERSION = 0x00000002
def __init__ (self):
self._Valid = False
self.Version = self.EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER_INIT_VERSION
self.UpdateImageTypeId = uuid.UUID ('00000000-0000-0000-0000-000000000000')
self.UpdateImageIndex = 0
self.UpdateImageSize = 0
self.UpdateVendorCodeSize = 0
self.UpdateHardwareInstance = 0x0000000000000000
self.Payload = b''
self.VendorCodeBytes = b''
def Encode (self):
self.UpdateImageSize = len (self.Payload)
self.UpdateVendorCodeSize = len (self.VendorCodeBytes)
FmpCapsuleImageHeader = struct.pack (
self._StructFormat,
self.Version,
self.UpdateImageTypeId.bytes_le,
self.UpdateImageIndex,
0,0,0,
self.UpdateImageSize,
self.UpdateVendorCodeSize,
self.UpdateHardwareInstance
)
self._Valid = True
return FmpCapsuleImageHeader + self.Payload + self.VendorCodeBytes
def Decode (self, Buffer):
if len (Buffer) < self._StructSize:
raise ValueError
(Version, UpdateImageTypeId, UpdateImageIndex, r0, r1, r2, UpdateImageSize, UpdateVendorCodeSize, UpdateHardwareInstance) = \
struct.unpack (
self._StructFormat,
Buffer[0:self._StructSize]
)
if Version < self.EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER_INIT_VERSION:
raise ValueError
if UpdateImageIndex < 1:
raise ValueError
if UpdateImageSize + UpdateVendorCodeSize != len (Buffer[self._StructSize:]):
raise ValueError
self.Version = Version
self.UpdateImageTypeId = uuid.UUID (bytes_le = UpdateImageTypeId)
self.UpdateImageIndex = UpdateImageIndex
self.UpdateImageSize = UpdateImageSize
self.UpdateVendorCodeSize = UpdateVendorCodeSize
self.UpdateHardwareInstance = UpdateHardwareInstance
self.Payload = Buffer[self._StructSize:self._StructSize + UpdateImageSize]
self.VendorCodeBytes = Buffer[self._StructSize + UpdateImageSize:]
self._Valid = True
return Buffer[self._StructSize:]
def DumpInfo (self):
if not self._Valid:
raise ValueError
print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER.Version = {Version:08X}'.format (Version = self.Version))
print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER.UpdateImageTypeId = {UpdateImageTypeId}'.format (UpdateImageTypeId = str(self.UpdateImageTypeId).upper()))
print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER.UpdateImageIndex = {UpdateImageIndex:08X}'.format (UpdateImageIndex = self.UpdateImageIndex))
print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER.UpdateImageSize = {UpdateImageSize:08X}'.format (UpdateImageSize = self.UpdateImageSize))
print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER.UpdateVendorCodeSize = {UpdateVendorCodeSize:08X}'.format (UpdateVendorCodeSize = self.UpdateVendorCodeSize))
print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER.UpdateHardwareInstance = {UpdateHardwareInstance:016X}'.format (UpdateHardwareInstance = self.UpdateHardwareInstance))
print ('sizeof (Payload) = {Size:08X}'.format (Size = len (self.Payload)))
print ('sizeof (VendorCodeBytes) = {Size:08X}'.format (Size = len (self.VendorCodeBytes)))
class FmpCapsuleHeaderClass (object):
# typedef struct {
# UINT32 Version;
#
# ///
# /// The number of drivers included in the capsule and the number of corresponding
# /// offsets stored in ItemOffsetList array.
# ///
# UINT16 EmbeddedDriverCount;
#
# ///
# /// The number of payload items included in the capsule and the number of
# /// corresponding offsets stored in the ItemOffsetList array.
# ///
# UINT16 PayloadItemCount;
#
# ///
# /// Variable length array of dimension [EmbeddedDriverCount + PayloadItemCount]
# /// containing offsets of each of the drivers and payload items contained within the capsule
# ///
# // UINT64 ItemOffsetList[];
# } EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER;
#
# #define EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER_INIT_VERSION 0x00000001
_StructFormat = '<IHH'
_StructSize = struct.calcsize (_StructFormat)
_ItemOffsetFormat = '<Q'
_ItemOffsetSize = struct.calcsize (_ItemOffsetFormat)
EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER_INIT_VERSION = 0x00000001
def __init__ (self):
self._Valid = False
self.Version = self.EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER_INIT_VERSION
self.EmbeddedDriverCount = 0
self.PayloadItemCount = 0
self._ItemOffsetList = []
self._EmbeddedDriverList = []
self._PayloadList = []
self._FmpCapsuleImageHeaderList = []
def AddEmbeddedDriver (self, EmbeddedDriver):
self._EmbeddedDriverList.append (EmbeddedDriver)
def GetEmbeddedDriver (self, Index):
if Index > len (self._EmbeddedDriverList):
raise ValueError
return self._EmbeddedDriverList[Index]
def AddPayload (self, UpdateImageTypeId, Payload = b'', VendorCodeBytes = b'', HardwareInstance = 0):
self._PayloadList.append ((UpdateImageTypeId, Payload, VendorCodeBytes, HardwareInstance))
def GetFmpCapsuleImageHeader (self, Index):
if Index >= len (self._FmpCapsuleImageHeaderList):
raise ValueError
return self._FmpCapsuleImageHeaderList[Index]
def Encode (self):
self.EmbeddedDriverCount = len (self._EmbeddedDriverList)
self.PayloadItemCount = len (self._PayloadList)
FmpCapsuleHeader = struct.pack (
self._StructFormat,
self.Version,
self.EmbeddedDriverCount,
self.PayloadItemCount
)
FmpCapsuleData = b''
Offset = self._StructSize + (self.EmbeddedDriverCount + self.PayloadItemCount) * self._ItemOffsetSize
for EmbeddedDriver in self._EmbeddedDriverList:
FmpCapsuleData = FmpCapsuleData + EmbeddedDriver
self._ItemOffsetList.append (Offset)
Offset = Offset + len (EmbeddedDriver)
Index = 1
for (UpdateImageTypeId, Payload, VendorCodeBytes, HardwareInstance) in self._PayloadList:
FmpCapsuleImageHeader = FmpCapsuleImageHeaderClass ()
FmpCapsuleImageHeader.UpdateImageTypeId = UpdateImageTypeId
FmpCapsuleImageHeader.UpdateImageIndex = Index
FmpCapsuleImageHeader.Payload = Payload
FmpCapsuleImageHeader.VendorCodeBytes = VendorCodeBytes
FmpCapsuleImageHeader.UpdateHardwareInstance = HardwareInstance
FmpCapsuleImage = FmpCapsuleImageHeader.Encode ()
FmpCapsuleData = FmpCapsuleData + FmpCapsuleImage
self._ItemOffsetList.append (Offset)
self._FmpCapsuleImageHeaderList.append (FmpCapsuleImageHeader)
Offset = Offset + len (FmpCapsuleImage)
Index = Index + 1
for Offset in self._ItemOffsetList:
FmpCapsuleHeader = FmpCapsuleHeader + struct.pack (self._ItemOffsetFormat, Offset)
self._Valid = True
return FmpCapsuleHeader + FmpCapsuleData
def Decode (self, Buffer):
if len (Buffer) < self._StructSize:
raise ValueError
(Version, EmbeddedDriverCount, PayloadItemCount) = \
struct.unpack (
self._StructFormat,
Buffer[0:self._StructSize]
)
if Version < self.EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER_INIT_VERSION:
raise ValueError
self.Version = Version
self.EmbeddedDriverCount = EmbeddedDriverCount
self.PayloadItemCount = PayloadItemCount
self._ItemOffsetList = []
self._EmbeddedDriverList = []
self._PayloadList = []
self._FmpCapsuleImageHeaderList = []
#
# Parse the ItemOffsetList values
#
Offset = self._StructSize
for Index in range (0, EmbeddedDriverCount + PayloadItemCount):
ItemOffset = struct.unpack (self._ItemOffsetFormat, Buffer[Offset:Offset + self._ItemOffsetSize])[0]
if ItemOffset >= len (Buffer):
raise ValueError
self._ItemOffsetList.append (ItemOffset)
Offset = Offset + self._ItemOffsetSize
Result = Buffer[Offset:]
#
# Parse the EmbeddedDrivers
#
for Index in range (0, EmbeddedDriverCount):
Offset = self._ItemOffsetList[Index]
if Index < (len (self._ItemOffsetList) - 1):
Length = self._ItemOffsetList[Index + 1] - Offset
else:
Length = len (Buffer) - Offset
self.AddEmbeddedDriver (Buffer[Offset:Offset + Length])
#
# Parse the Payloads that are FMP Capsule Images
#
for Index in range (EmbeddedDriverCount, EmbeddedDriverCount + PayloadItemCount):
Offset = self._ItemOffsetList[Index]
if Index < (len (self._ItemOffsetList) - 1):
Length = self._ItemOffsetList[Index + 1] - Offset
else:
Length = len (Buffer) - Offset
FmpCapsuleImageHeader = FmpCapsuleImageHeaderClass ()
FmpCapsuleImageHeader.Decode (Buffer[Offset:Offset + Length])
self.AddPayload (
FmpCapsuleImageHeader.UpdateImageTypeId,
FmpCapsuleImageHeader.Payload,
FmpCapsuleImageHeader.VendorCodeBytes
)
self._FmpCapsuleImageHeaderList.append (FmpCapsuleImageHeader)
self._Valid = True
return Result
def DumpInfo (self):
if not self._Valid:
raise ValueError
print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER.Version = {Version:08X}'.format (Version = self.Version))
print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER.EmbeddedDriverCount = {EmbeddedDriverCount:08X}'.format (EmbeddedDriverCount = self.EmbeddedDriverCount))
print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER.PayloadItemCount = {PayloadItemCount:08X}'.format (PayloadItemCount = self.PayloadItemCount))
print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER.ItemOffsetList = ')
for Offset in self._ItemOffsetList:
print (' {Offset:016X}'.format (Offset = Offset))
for FmpCapsuleImageHeader in self._FmpCapsuleImageHeaderList:
FmpCapsuleImageHeader.DumpInfo ()
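A round-trip sketch for the two classes above: build a capsule with a single payload, encode it to bytes, then decode those bytes back and dump the fields. The GUID and payload contents are illustrative:
import uuid

capsule = FmpCapsuleHeaderClass ()
capsule.AddPayload (
    uuid.UUID ('12345678-1234-1234-1234-123456789abc'),  # hypothetical ImageTypeId
    Payload = b'\x00' * 16,
    HardwareInstance = 1
    )
blob = capsule.Encode ()

decoded = FmpCapsuleHeaderClass ()
decoded.Decode (blob)
decoded.DumpInfo ()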
| 45.745875
| 179
| 0.626362
|
44f1bf12451cf3b5934d89daa0e2a8643a358709
| 1,943
|
py
|
Python
|
samples/snippets/run_notification.py
|
adamread/python-bigquery-datatransfer
|
efdcdbbe743595a505f10e0d596a39d362223aaa
|
[
"Apache-2.0"
] | 58
|
2020-03-05T16:06:45.000Z
|
2022-03-28T18:20:46.000Z
|
samples/snippets/run_notification.py
|
adamread/python-bigquery-datatransfer
|
efdcdbbe743595a505f10e0d596a39d362223aaa
|
[
"Apache-2.0"
] | 120
|
2020-02-05T09:56:10.000Z
|
2022-03-23T00:19:09.000Z
|
samples/snippets/run_notification.py
|
adamread/python-bigquery-datatransfer
|
efdcdbbe743595a505f10e0d596a39d362223aaa
|
[
"Apache-2.0"
] | 21
|
2020-02-05T23:11:23.000Z
|
2022-01-29T08:07:36.000Z
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def run_notification(transfer_config_name, pubsub_topic):
orig_transfer_config_name = transfer_config_name
orig_pubsub_topic = pubsub_topic
# [START bigquerydatatransfer_run_notification]
transfer_config_name = "projects/1234/locations/us/transferConfigs/abcd"
pubsub_topic = "projects/PROJECT-ID/topics/TOPIC-ID"
# [END bigquerydatatransfer_run_notification]
transfer_config_name = orig_transfer_config_name
pubsub_topic = orig_pubsub_topic
# [START bigquerydatatransfer_run_notification]
from google.cloud import bigquery_datatransfer
from google.protobuf import field_mask_pb2
transfer_client = bigquery_datatransfer.DataTransferServiceClient()
transfer_config = bigquery_datatransfer.TransferConfig(name=transfer_config_name)
transfer_config.notification_pubsub_topic = pubsub_topic
update_mask = field_mask_pb2.FieldMask(paths=["notification_pubsub_topic"])
transfer_config = transfer_client.update_transfer_config(
{"transfer_config": transfer_config, "update_mask": update_mask}
)
print(f"Updated config: '{transfer_config.name}'")
print(f"Notification Pub/Sub topic: '{transfer_config.notification_pubsub_topic}'")
# [END bigquerydatatransfer_run_notification]
    # Return the config for testing purposes, so that it can be deleted.
return transfer_config
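A hedged usage sketch; both resource names below are placeholders rather than real GCP resources:
# config = run_notification(
#     transfer_config_name="projects/1234/locations/us/transferConfigs/abcd",
#     pubsub_topic="projects/my-project/topics/my-topic",
# )
# The returned config can then be cleaned up by the caller or a test harness.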
| 43.177778
| 87
| 0.784869
|
d8058f31cb7f09d7544c1d1440d32c94b61c8193
| 1,744
|
py
|
Python
|
models/base_model.py
|
artBoffin/GooeyBrain
|
60b08c713c97a4ae24ad9a47a73d69dab41d27ea
|
[
"Apache-2.0"
] | 2
|
2017-07-13T04:53:47.000Z
|
2020-08-20T03:48:46.000Z
|
models/base_model.py
|
artBoffin/GooeyBrain
|
60b08c713c97a4ae24ad9a47a73d69dab41d27ea
|
[
"Apache-2.0"
] | 11
|
2017-02-19T01:28:43.000Z
|
2022-03-11T23:15:40.000Z
|
models/base_model.py
|
artBoffin/GooeyBrain
|
60b08c713c97a4ae24ad9a47a73d69dab41d27ea
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import print_function
import os
import sys
import time
from models.parameter import Parameter
from models.util import log, error
import tensorflow as tf
def time_string():
return time.strftime("%Y%m%d-%H%M%S")
class BaseModel:
def __init__(self, sess, params):
self.log("module ctor")
self.run_name = params["run_name"] if "run_name" in params else "model%s"%time_string()
self.checkpoint_dir = params["checkpoint_dir"] if "checkpoint_dir" in params else "./"
self.report = {}
def train(self):
raise ValueError("Model did not override train()")
def generate(self, event, data_dict):
raise ValueError("Model did not override generate()")
def log(self, msg):
log(self.__class__.__name__ + " :", msg)
def error(self, msg):
error(self.__class__.__name__ + " :", msg)
def save(self, sess, step):
self.saver.save(sess,
os.path.join(self.checkpoint_dir, "%s.model" % self.run_name),
global_step=step)
def load(self, sess):
print(" [*] Reading checkpoints...")
ckpt = tf.train.get_checkpoint_state(self.checkpoint_dir)
if ckpt and ckpt.model_checkpoint_path:
ckpt_name = os.path.basename(ckpt.model_checkpoint_path)
self.saver.restore(sess, os.path.join(self.checkpoint_dir, ckpt_name))
print(" [*] Success to read {}".format(ckpt_name))
return True
else:
print(" [*] Failed to find a checkpoint")
return False
@staticmethod
def parametersJSON(parameters):
arr = []
for p in parameters:
arr.append(p.getJson())
return arr
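Concrete models are expected to subclass BaseModel and override train() and generate(); a minimal sketch (the session and params shown are illustrative):
class NoopModel(BaseModel):
    def train(self):
        self.log("training (no-op)")

    def generate(self, event, data_dict):
        self.log("generate called for %s" % event)
        return data_dict

# model = NoopModel(sess=None, params={"run_name": "demo"})
# model.train()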
| 29.559322
| 95
| 0.617546
|
b9f7c889c22befb25287d6f9989d796751ceb096
| 1,878
|
py
|
Python
|
source/wizard.py
|
awiouy/service.librespot
|
016bc87c98ab6b990dca8874916cf34937677396
|
[
"MIT"
] | 3
|
2017-11-22T19:48:12.000Z
|
2017-12-31T00:31:22.000Z
|
source/wizard.py
|
awiouy/service.librespot
|
016bc87c98ab6b990dca8874916cf34937677396
|
[
"MIT"
] | null | null | null |
source/wizard.py
|
awiouy/service.librespot
|
016bc87c98ab6b990dca8874916cf34937677396
|
[
"MIT"
] | 1
|
2020-01-19T23:36:18.000Z
|
2020-01-19T23:36:18.000Z
|
################################################################################
# This file is part of LibreELEC - https://libreelec.tv
# Copyright (C) 2016 Team LibreELEC
#
# LibreELEC is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# LibreELEC is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with LibreELEC. If not, see <http://www.gnu.org/licenses/>.
################################################################################
from lib import alsaaudio as alsa
import xbmcaddon
import xbmcgui
if __name__ == "__main__":
addon = xbmcaddon.Addon("service.librespot")
dialog = xbmcgui.Dialog()
strings = addon.getLocalizedString
while True:
pcms = alsa.pcms()[1:]
if len(pcms) == 0:
dialog.ok(strings(30211), strings(30212))
break
pcmx = dialog.select(strings(30113), pcms)
if pcmx == -1:
break
pcm = pcms[pcmx]
pair = pcm.split(":CARD=")
device = pair[0]
card = pair[1].split(",")[0]
cardx = alsa.cards().index(card)
mixers = [mixer for mixer in alsa.mixers(cardindex=cardx, device=device)
if 'Playback Volume' in alsa.Mixer(control=mixer, cardindex=cardx).volumecap()]
if len(mixers) == 0:
mixer = ""
else:
mixerx = dialog.select(strings(30114), mixers)
if mixerx == -1:
continue
mixer = mixers[mixerx]
addon.setSetting("ls_m", mixer)
addon.setSetting("ls_o", pcm)
break
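The device/mixer discovery above can be exercised outside Kodi with the same pyalsaaudio calls; a sketch whose output depends entirely on the local hardware:
import alsaaudio as alsa  # plain pyalsaaudio outside Kodi

for pcm in alsa.pcms()[1:]:
    print("PCM:", pcm)
for cardx, card in enumerate(alsa.cards()):
    print("card %d (%s): mixers %s" % (cardx, card, alsa.mixers(cardindex=cardx)))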
| 34.777778
| 87
| 0.615548
|
cda3dbbfde293430ac9f28465fa2c75ca7320ffd
| 9,894
|
py
|
Python
|
api/tacticalrmm/automation/views.py
|
matstocks/tacticalrmm
|
3269d047f08546c2a44cbabb3c4c27211804bdf5
|
[
"MIT"
] | 1
|
2020-08-14T20:42:31.000Z
|
2020-08-14T20:42:31.000Z
|
api/tacticalrmm/automation/views.py
|
3v1lW1th1n/tacticalrmm
|
ae96dba2aa6679fcccfc8a032158d0ea038f5423
|
[
"MIT"
] | 5
|
2021-04-08T19:44:31.000Z
|
2021-09-22T19:34:33.000Z
|
api/tacticalrmm/automation/views.py
|
3v1lW1th1n/tacticalrmm
|
ae96dba2aa6679fcccfc8a032158d0ea038f5423
|
[
"MIT"
] | null | null | null |
from django.db import DataError
from django.shortcuts import get_object_or_404
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from .models import Policy
from agents.models import Agent
from scripts.models import Script
from clients.models import Client, Site
from checks.models import Check
from autotasks.models import AutomatedTask
from clients.serializers import ClientSerializer, TreeSerializer
from checks.serializers import CheckSerializer
from agents.serializers import AgentHostnameSerializer
from autotasks.serializers import TaskSerializer
from .serializers import (
PolicySerializer,
PolicyTableSerializer,
PolicyOverviewSerializer,
PolicyCheckStatusSerializer,
PolicyTaskStatusSerializer,
AutoTaskPolicySerializer,
)
from .tasks import (
generate_agent_checks_from_policies_task,
generate_agent_checks_by_location_task,
generate_agent_tasks_from_policies_task,
generate_agent_tasks_by_location_task,
run_win_policy_autotask_task,
)
class GetAddPolicies(APIView):
def get(self, request):
policies = Policy.objects.all()
return Response(PolicyTableSerializer(policies, many=True).data)
def post(self, request):
serializer = PolicySerializer(data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response("ok")
class GetUpdateDeletePolicy(APIView):
def get(self, request, pk):
policy = get_object_or_404(Policy, pk=pk)
return Response(PolicySerializer(policy).data)
def put(self, request, pk):
policy = get_object_or_404(Policy, pk=pk)
old_active = policy.active
old_enforced = policy.enforced
serializer = PolicySerializer(instance=policy, data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
saved_policy = serializer.save()
# Generate agent checks only if active and enforced were changed
if saved_policy.active != old_active or saved_policy.enforced != old_enforced:
generate_agent_checks_from_policies_task.delay(
policypk=policy.pk,
clear=(not saved_policy.active or not saved_policy.enforced),
)
        # Generate agent tasks if active was changed
if saved_policy.active != old_active:
generate_agent_tasks_from_policies_task.delay(
policypk=policy.pk, clear=(not saved_policy.active)
)
return Response("ok")
def delete(self, request, pk):
policy = Policy.objects.get(pk=pk)
# delete all managed policy checks off of agents
generate_agent_checks_from_policies_task.delay(policypk=policy.pk, clear=True)
generate_agent_tasks_from_policies_task.delay(policypk=policy.pk, clear=True)
policy.delete()
return Response("ok")
class PolicyAutoTask(APIView):
def get(self, request, pk):
policy = get_object_or_404(Policy, pk=pk)
return Response(AutoTaskPolicySerializer(policy).data)
def patch(self, request, task):
tasks = AutomatedTask.objects.filter(parent_task=task)
return Response(PolicyTaskStatusSerializer(tasks, many=True).data)
def put(self, request, task):
tasks = AutomatedTask.objects.filter(parent_task=task)
run_win_policy_autotask_task.delay([task.id for task in tasks])
return Response("Affected agent tasks will run shortly")
class PolicyCheck(APIView):
def get(self, request, pk):
checks = Check.objects.filter(policy__pk=pk, agent=None)
return Response(CheckSerializer(checks, many=True).data)
def patch(self, request, check):
checks = Check.objects.filter(parent_check=check)
return Response(PolicyCheckStatusSerializer(checks, many=True).data)
class OverviewPolicy(APIView):
def get(self, request):
clients = Client.objects.all()
return Response(PolicyOverviewSerializer(clients, many=True).data)
class GetRelated(APIView):
def get(self, request, pk):
response = {}
policy = (
Policy.objects.filter(pk=pk).prefetch_related("clients", "sites").first()
)
response["clients"] = ClientSerializer(policy.clients.all(), many=True).data
filtered_sites = list()
for client in policy.clients.all():
for site in client.sites.all():
if site not in policy.sites.all():
filtered_sites.append(site)
response["sites"] = TreeSerializer(
filtered_sites + list(policy.sites.all()), many=True
).data
response["agents"] = AgentHostnameSerializer(
policy.related_agents(), many=True
).data
return Response(response)
def post(self, request):
# Update Agents, Clients, Sites to Policy
related_type = request.data["type"]
pk = request.data["pk"]
if request.data["policy"] != 0:
policy = get_object_or_404(Policy, pk=request.data["policy"])
if related_type == "client":
client = get_object_or_404(Client, pk=pk)
# Check and see if policy changed and regenerate policies
            if not client.policy or client.policy.pk != policy.pk:
client.policy = policy
client.save()
generate_agent_checks_by_location_task.delay(
location={"client": client.pk}, clear=True
)
generate_agent_tasks_by_location_task.delay(
location={"client": client.pk}, clear=True
)
if related_type == "site":
site = get_object_or_404(Site, pk=pk)
# Check and see if policy changed and regenerate policies
            if not site.policy or site.policy.pk != policy.pk:
site.policy = policy
site.save()
generate_agent_checks_by_location_task.delay(
location={"client": site.client.client, "site": site.pk},
clear=True,
)
generate_agent_tasks_by_location_task.delay(
location={"client": site.client.client, "site": site.pk},
clear=True,
)
if related_type == "agent":
agent = get_object_or_404(Agent, pk=pk)
# Check and see if policy changed and regenerate policies
            if not agent.policy or agent.policy.pk != policy.pk:
agent.policy = policy
agent.save()
agent.generate_checks_from_policies(clear=True)
agent.generate_tasks_from_policies(clear=True)
# If policy was cleared or blank
else:
if related_type == "client":
client = get_object_or_404(Client, pk=pk)
# Check if policy is not none and update it to None
if client.policy:
client.policy = None
client.save()
generate_agent_checks_by_location_task.delay(
location={"client": client.pk}, clear=True
)
generate_agent_tasks_by_location_task.delay(
location={"client": client.pk}, clear=True
)
if related_type == "site":
site = get_object_or_404(Site, pk=pk)
# Check if policy is not none and update it to None
if site.policy:
site.policy = None
site.save()
generate_agent_checks_by_location_task.delay(
location={"client": site.client.client, "site": site.pk},
clear=True,
)
generate_agent_tasks_by_location_task.delay(
location={"client": site.client.client, "site": site.pk},
clear=True,
)
if related_type == "agent":
agent = get_object_or_404(Agent, pk=pk)
if agent.policy:
agent.policy = None
agent.save()
agent.generate_checks_from_policies(clear=True)
agent.generate_tasks_from_policies(clear=True)
return Response("ok")
def patch(self, request):
related_type = request.data["type"]
pk = request.data["pk"]
if related_type == "agent":
policy = Policy.objects.filter(agents__pk=pk).first()
return Response(PolicySerializer(policy).data)
if related_type == "site":
policy = Policy.objects.filter(sites__pk=pk).first()
return Response(PolicySerializer(policy).data)
if related_type == "client":
policy = Policy.objects.filter(clients__pk=pk).first()
return Response(PolicySerializer(policy).data)
content = {"error": "Data was submitted incorrectly"}
return Response(content, status=status.HTTP_400_BAD_REQUEST)
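These APIView classes still need routes; a hypothetical urls.py sketch (paths are illustrative, not taken from the project):
from django.urls import path

from . import views

urlpatterns = [
    path("policies/", views.GetAddPolicies.as_view()),
    path("policies/<int:pk>/", views.GetUpdateDeletePolicy.as_view()),
    path("policies/overview/", views.OverviewPolicy.as_view()),
    path("policies/related/", views.GetRelated.as_view()),
]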
| 36.780669
| 89
| 0.5943
|
81360a22f7edb6ba4b68b6a07573f1e517862f1e
| 20,907
|
py
|
Python
|
twisted/internet/test/test_unix.py
|
jMyles/twisted
|
a5108c0db829847bbfd4bf427868eb0b13da0465
|
[
"Unlicense",
"MIT"
] | 1
|
2015-08-18T06:57:28.000Z
|
2015-08-18T06:57:28.000Z
|
Lib/site-packages/twisted/internet/test/test_unix.py
|
adzhou/Python27
|
a7113b69d54a04cc780143241c2f1fe81939ad3a
|
[
"bzip2-1.0.6"
] | null | null | null |
Lib/site-packages/twisted/internet/test/test_unix.py
|
adzhou/Python27
|
a7113b69d54a04cc780143241c2f1fe81939ad3a
|
[
"bzip2-1.0.6"
] | 1
|
2020-11-01T20:40:01.000Z
|
2020-11-01T20:40:01.000Z
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for implementations of L{IReactorUNIX}.
"""
from __future__ import division, absolute_import
from stat import S_IMODE
from os import stat, close, urandom
from tempfile import mktemp
from socket import AF_INET, SOCK_STREAM, socket
from pprint import pformat
from hashlib import md5
try:
from socket import AF_UNIX
except ImportError:
AF_UNIX = None
from zope.interface import implementer
from twisted.internet import interfaces
from twisted.internet.address import UNIXAddress
from twisted.internet.defer import Deferred, fail
from twisted.internet.endpoints import UNIXServerEndpoint, UNIXClientEndpoint
from twisted.internet.error import ConnectionClosed, FileDescriptorOverrun
from twisted.internet.interfaces import IFileDescriptorReceiver, IReactorUNIX
from twisted.internet.protocol import DatagramProtocol
from twisted.internet.protocol import ServerFactory, ClientFactory
from twisted.internet.task import LoopingCall
from twisted.internet.test.connectionmixins import EndpointCreator
from twisted.internet.test.reactormixins import ReactorBuilder
from twisted.internet.test.test_core import ObjectModelIntegrationMixin
from twisted.internet.test.test_tcp import StreamTransportTestsMixin
from twisted.internet.test.connectionmixins import ConnectableProtocol
from twisted.internet.test.connectionmixins import ConnectionTestsMixin
from twisted.internet.test.connectionmixins import StreamClientTestsMixin
from twisted.internet.test.connectionmixins import runProtocolsWithReactor
from twisted.python.compat import nativeString, _PY3, iteritems
from twisted.python.failure import Failure
from twisted.python.log import addObserver, removeObserver, err
from twisted.python.runtime import platform
from twisted.python.reflect import requireModule
if requireModule("twisted.python.sendmsg") is not None:
sendmsgSkip = None
else:
sendmsgSkip = (
"sendmsg extension unavailable, extended UNIX features disabled")
class UNIXFamilyMixin:
"""
Test-helper defining mixin for things related to AF_UNIX sockets.
"""
def _modeTest(self, methodName, path, factory):
"""
Assert that the mode of the created unix socket is set to the mode
specified to the reactor method.
"""
mode = 0o600
reactor = self.buildReactor()
unixPort = getattr(reactor, methodName)(path, factory, mode=mode)
unixPort.stopListening()
self.assertEqual(S_IMODE(stat(path).st_mode), mode)
def _abstractPath(case):
"""
Return a new, unique abstract namespace path to be listened on.
"""
return md5(urandom(100)).hexdigest()
class UNIXCreator(EndpointCreator):
"""
Create UNIX socket end points.
"""
requiredInterfaces = (interfaces.IReactorUNIX,)
def server(self, reactor):
"""
Construct a UNIX server endpoint.
"""
# self.mktemp() often returns a path which is too long to be used.
path = mktemp(suffix='.sock', dir='.')
return UNIXServerEndpoint(reactor, path)
def client(self, reactor, serverAddress):
"""
Construct a UNIX client endpoint.
"""
return UNIXClientEndpoint(reactor, serverAddress.name)
class SendFileDescriptor(ConnectableProtocol):
"""
    L{SendFileDescriptor} sends a file descriptor and optionally some
normal bytes and then closes its connection.
@ivar reason: The reason the connection was lost, after C{connectionLost}
is called.
"""
reason = None
def __init__(self, fd, data):
"""
@param fd: A C{int} giving a file descriptor to send over the
connection.
@param data: A C{str} giving data to send over the connection, or
C{None} if no data is to be sent.
"""
self.fd = fd
self.data = data
def connectionMade(self):
"""
Send C{self.fd} and, if it is not C{None}, C{self.data}. Then close the
connection.
"""
self.transport.sendFileDescriptor(self.fd)
if self.data:
self.transport.write(self.data)
self.transport.loseConnection()
def connectionLost(self, reason):
ConnectableProtocol.connectionLost(self, reason)
self.reason = reason
@implementer(IFileDescriptorReceiver)
class ReceiveFileDescriptor(ConnectableProtocol):
"""
L{ReceiveFileDescriptor} provides an API for waiting for file descriptors to
be received.
@ivar reason: The reason the connection was lost, after C{connectionLost}
is called.
@ivar waiting: A L{Deferred} which fires with a file descriptor once one is
received, or with a failure if the connection is lost with no descriptor
arriving.
"""
reason = None
waiting = None
def waitForDescriptor(self):
"""
Return a L{Deferred} which will fire with the next file descriptor
received, or with a failure if the connection is or has already been
lost.
"""
if self.reason is None:
self.waiting = Deferred()
return self.waiting
else:
return fail(self.reason)
def fileDescriptorReceived(self, descriptor):
"""
Fire the waiting Deferred, initialized by C{waitForDescriptor}, with the
file descriptor just received.
"""
self.waiting.callback(descriptor)
self.waiting = None
def dataReceived(self, data):
"""
Fail the waiting Deferred, if it has not already been fired by
C{fileDescriptorReceived}. The bytes sent along with a file descriptor
are guaranteed to be delivered to the protocol's C{dataReceived} method
only after the file descriptor has been delivered to the protocol's
C{fileDescriptorReceived}.
"""
if self.waiting is not None:
self.waiting.errback(Failure(Exception(
"Received bytes (%r) before descriptor." % (data,))))
self.waiting = None
def connectionLost(self, reason):
"""
Fail the waiting Deferred, initialized by C{waitForDescriptor}, if there
is one.
"""
ConnectableProtocol.connectionLost(self, reason)
if self.waiting is not None:
self.waiting.errback(reason)
self.waiting = None
self.reason = reason
class UNIXTestsBuilder(UNIXFamilyMixin, ReactorBuilder, ConnectionTestsMixin):
"""
Builder defining tests relating to L{IReactorUNIX}.
"""
requiredInterfaces = (IReactorUNIX,)
endpoints = UNIXCreator()
def test_mode(self):
"""
The UNIX socket created by L{IReactorUNIX.listenUNIX} is created with
the mode specified.
"""
self._modeTest('listenUNIX', self.mktemp(), ServerFactory())
def test_listenOnLinuxAbstractNamespace(self):
"""
On Linux, a UNIX socket path may begin with C{'\0'} to indicate a socket
in the abstract namespace. L{IReactorUNIX.listenUNIX} accepts such a
path.
"""
# Don't listen on a path longer than the maximum allowed.
path = _abstractPath(self)
reactor = self.buildReactor()
port = reactor.listenUNIX('\0' + path, ServerFactory())
self.assertEqual(port.getHost(), UNIXAddress('\0' + path))
if not platform.isLinux():
test_listenOnLinuxAbstractNamespace.skip = (
'Abstract namespace UNIX sockets only supported on Linux.')
def test_connectToLinuxAbstractNamespace(self):
"""
L{IReactorUNIX.connectUNIX} also accepts a Linux abstract namespace
path.
"""
path = _abstractPath(self)
reactor = self.buildReactor()
connector = reactor.connectUNIX('\0' + path, ClientFactory())
self.assertEqual(connector.getDestination(), UNIXAddress('\0' + path))
if not platform.isLinux():
test_connectToLinuxAbstractNamespace.skip = (
'Abstract namespace UNIX sockets only supported on Linux.')
def test_addresses(self):
"""
A client's transport's C{getHost} and C{getPeer} return L{UNIXAddress}
instances which have the filesystem path of the host and peer ends of
the connection.
"""
class SaveAddress(ConnectableProtocol):
def makeConnection(self, transport):
self.addresses = dict(
host=transport.getHost(), peer=transport.getPeer())
transport.loseConnection()
server = SaveAddress()
client = SaveAddress()
runProtocolsWithReactor(self, server, client, self.endpoints)
self.assertEqual(server.addresses['host'], client.addresses['peer'])
self.assertEqual(server.addresses['peer'], client.addresses['host'])
def test_sendFileDescriptor(self):
"""
L{IUNIXTransport.sendFileDescriptor} accepts an integer file descriptor
and sends a copy of it to the process reading from the connection.
"""
from socket import fromfd
s = socket()
s.bind(('', 0))
server = SendFileDescriptor(s.fileno(), b"junk")
client = ReceiveFileDescriptor()
d = client.waitForDescriptor()
def checkDescriptor(descriptor):
received = fromfd(descriptor, AF_INET, SOCK_STREAM)
# Thanks for the free dup, fromfd()
close(descriptor)
# If the sockets have the same local address, they're probably the
# same.
self.assertEqual(s.getsockname(), received.getsockname())
# But it would be cheating for them to be identified by the same
# file descriptor. The point was to get a copy, as we might get if
# there were two processes involved here.
self.assertNotEqual(s.fileno(), received.fileno())
d.addCallback(checkDescriptor)
d.addErrback(err, "Sending file descriptor encountered a problem")
d.addBoth(lambda ignored: server.transport.loseConnection())
runProtocolsWithReactor(self, server, client, self.endpoints)
if sendmsgSkip is not None:
test_sendFileDescriptor.skip = sendmsgSkip
def test_sendFileDescriptorTriggersPauseProducing(self):
"""
If a L{IUNIXTransport.sendFileDescriptor} call fills up the send buffer,
any registered producer is paused.
"""
class DoesNotRead(ConnectableProtocol):
def connectionMade(self):
self.transport.pauseProducing()
class SendsManyFileDescriptors(ConnectableProtocol):
paused = False
def connectionMade(self):
self.socket = socket()
self.transport.registerProducer(self, True)
def sender():
self.transport.sendFileDescriptor(self.socket.fileno())
self.transport.write(b"x")
self.task = LoopingCall(sender)
self.task.clock = self.transport.reactor
self.task.start(0).addErrback(err, "Send loop failure")
def stopProducing(self):
self._disconnect()
def resumeProducing(self):
self._disconnect()
def pauseProducing(self):
self.paused = True
self.transport.unregisterProducer()
self._disconnect()
def _disconnect(self):
self.task.stop()
self.transport.abortConnection()
self.other.transport.abortConnection()
server = SendsManyFileDescriptors()
client = DoesNotRead()
server.other = client
runProtocolsWithReactor(self, server, client, self.endpoints)
self.assertTrue(
server.paused, "sendFileDescriptor producer was not paused")
if sendmsgSkip is not None:
test_sendFileDescriptorTriggersPauseProducing.skip = sendmsgSkip
def test_fileDescriptorOverrun(self):
"""
If L{IUNIXTransport.sendFileDescriptor} is used to queue a greater
number of file descriptors than the number of bytes sent using
L{ITransport.write}, the connection is closed and the protocol connected
to the transport has its C{connectionLost} method called with a failure
wrapping L{FileDescriptorOverrun}.
"""
cargo = socket()
server = SendFileDescriptor(cargo.fileno(), None)
client = ReceiveFileDescriptor()
result = []
d = client.waitForDescriptor()
d.addBoth(result.append)
d.addBoth(lambda ignored: server.transport.loseConnection())
runProtocolsWithReactor(self, server, client, self.endpoints)
self.assertIsInstance(result[0], Failure)
result[0].trap(ConnectionClosed)
self.assertIsInstance(server.reason.value, FileDescriptorOverrun)
if sendmsgSkip is not None:
test_fileDescriptorOverrun.skip = sendmsgSkip
def test_avoidLeakingFileDescriptors(self):
"""
If associated with a protocol which does not provide
L{IFileDescriptorReceiver}, file descriptors received by the
L{IUNIXTransport} implementation are closed and a warning is emitted.
"""
# To verify this, establish a connection. Send one end of the
# connection over the IUNIXTransport implementation. After the copy
# should no longer exist, close the original. If the opposite end of
# the connection decides the connection is closed, the copy does not
# exist.
from socket import socketpair
probeClient, probeServer = socketpair()
events = []
addObserver(events.append)
self.addCleanup(removeObserver, events.append)
class RecordEndpointAddresses(SendFileDescriptor):
def connectionMade(self):
self.hostAddress = self.transport.getHost()
self.peerAddress = self.transport.getPeer()
SendFileDescriptor.connectionMade(self)
server = RecordEndpointAddresses(probeClient.fileno(), b"junk")
client = ConnectableProtocol()
runProtocolsWithReactor(self, server, client, self.endpoints)
# Get rid of the original reference to the socket.
probeClient.close()
# A non-blocking recv will return "" if the connection is closed, as
# desired. If the connection has not been closed, because the
# duplicate file descriptor is still open, it will fail with EAGAIN
# instead.
probeServer.setblocking(False)
self.assertEqual(b"", probeServer.recv(1024))
# This is a surprising circumstance, so it should be logged.
format = (
"%(protocolName)s (on %(hostAddress)r) does not "
"provide IFileDescriptorReceiver; closing file "
"descriptor received (from %(peerAddress)r).")
clsName = "ConnectableProtocol"
# Reverse host and peer, since the log event is from the client
# perspective.
expectedEvent = dict(hostAddress=server.peerAddress,
peerAddress=server.hostAddress,
protocolName=clsName,
format=format)
for logEvent in events:
for k, v in iteritems(expectedEvent):
if v != logEvent.get(k):
break
else:
# No mismatches were found, stop looking at events
break
else:
# No fully matching events were found, fail the test.
self.fail(
"Expected event (%s) not found in logged events (%s)" % (
expectedEvent, pformat(events,)))
if sendmsgSkip is not None:
test_avoidLeakingFileDescriptors.skip = sendmsgSkip
def test_descriptorDeliveredBeforeBytes(self):
"""
L{IUNIXTransport.sendFileDescriptor} sends file descriptors before
L{ITransport.write} sends normal bytes.
"""
@implementer(IFileDescriptorReceiver)
class RecordEvents(ConnectableProtocol):
def connectionMade(self):
ConnectableProtocol.connectionMade(self)
self.events = []
def fileDescriptorReceived(innerSelf, descriptor):
self.addCleanup(close, descriptor)
innerSelf.events.append(type(descriptor))
def dataReceived(self, data):
self.events.extend(data)
cargo = socket()
server = SendFileDescriptor(cargo.fileno(), b"junk")
client = RecordEvents()
runProtocolsWithReactor(self, server, client, self.endpoints)
self.assertEqual(int, client.events[0])
if _PY3:
self.assertEqual(b"junk", bytes(client.events[1:]))
else:
self.assertEqual(b"junk", b"".join(client.events[1:]))
if sendmsgSkip is not None:
test_descriptorDeliveredBeforeBytes.skip = sendmsgSkip
class UNIXDatagramTestsBuilder(UNIXFamilyMixin, ReactorBuilder):
"""
Builder defining tests relating to L{IReactorUNIXDatagram}.
"""
requiredInterfaces = (interfaces.IReactorUNIXDatagram,)
# There's no corresponding test_connectMode because the mode parameter to
# connectUNIXDatagram has been completely ignored since that API was first
# introduced.
def test_listenMode(self):
"""
The UNIX socket created by L{IReactorUNIXDatagram.listenUNIXDatagram}
is created with the mode specified.
"""
self._modeTest('listenUNIXDatagram', self.mktemp(), DatagramProtocol())
def test_listenOnLinuxAbstractNamespace(self):
"""
On Linux, a UNIX socket path may begin with C{'\0'} to indicate a socket
in the abstract namespace. L{IReactorUNIX.listenUNIXDatagram} accepts
such a path.
"""
path = _abstractPath(self)
reactor = self.buildReactor()
port = reactor.listenUNIXDatagram('\0' + path, DatagramProtocol())
self.assertEqual(port.getHost(), UNIXAddress('\0' + path))
if not platform.isLinux():
test_listenOnLinuxAbstractNamespace.skip = (
'Abstract namespace UNIX sockets only supported on Linux.')
class UNIXPortTestsBuilder(ReactorBuilder, ObjectModelIntegrationMixin,
StreamTransportTestsMixin):
"""
    Tests for L{IReactorUNIX.listenUNIX}
"""
requiredInterfaces = (interfaces.IReactorUNIX,)
def getListeningPort(self, reactor, factory):
"""
Get a UNIX port from a reactor
"""
# self.mktemp() often returns a path which is too long to be used.
path = mktemp(suffix='.sock', dir='.')
return reactor.listenUNIX(path, factory)
def getExpectedStartListeningLogMessage(self, port, factory):
"""
Get the message expected to be logged when a UNIX port starts listening.
"""
return "%s starting on %r" % (factory,
nativeString(port.getHost().name))
def getExpectedConnectionLostLogMsg(self, port):
"""
Get the expected connection lost message for a UNIX port
"""
return "(UNIX Port %s Closed)" % (nativeString(port.getHost().name),)
globals().update(UNIXTestsBuilder.makeTestCaseClasses())
globals().update(UNIXDatagramTestsBuilder.makeTestCaseClasses())
globals().update(UNIXPortTestsBuilder.makeTestCaseClasses())
class UnixClientTestsBuilder(ReactorBuilder, StreamClientTestsMixin):
"""
Define tests for L{IReactorUNIX.connectUNIX}.
"""
requiredInterfaces = (IReactorUNIX,)
_path = None
@property
def path(self):
"""
Return a path usable by C{connectUNIX} and C{listenUNIX}.
@return: A path instance, built with C{_abstractPath}.
"""
if self._path is None:
self._path = _abstractPath(self)
return self._path
def listen(self, reactor, factory):
"""
Start an UNIX server with the given C{factory}.
@param reactor: The reactor to create the UNIX port in.
@param factory: The server factory.
@return: A UNIX port instance.
"""
return reactor.listenUNIX(self.path, factory)
def connect(self, reactor, factory):
"""
Start an UNIX client with the given C{factory}.
@param reactor: The reactor to create the connection in.
@param factory: The client factory.
@return: A UNIX connector instance.
"""
return reactor.connectUNIX(self.path, factory)
globals().update(UnixClientTestsBuilder.makeTestCaseClasses())
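The descriptor passing these tests exercise can be reproduced with the standard library alone; a sketch using socket.send_fds/recv_fds (available since Python 3.9), independent of Twisted:
import os
import socket

left, right = socket.socketpair(socket.AF_UNIX, socket.SOCK_STREAM)
r, w = os.pipe()
socket.send_fds(left, [b"x"], [r])   # at least one byte must accompany the fd
data, fds, flags, addr = socket.recv_fds(right, 1024, 1)
os.write(w, b"hello")
print(data, os.read(fds[0], 5))      # prints b'x' b'hello'
for fd in (r, w, fds[0]):
    os.close(fd)
left.close()
right.close()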
| 34.386513
| 80
| 0.651696
|
5c36b21ae55acb34e150894ae32e7aadf102f20e
| 5,532
|
py
|
Python
|
Adafruit/Adafruit_I2C.py
|
jelly/prometheus-bmp180-exporter
|
1ff0dd880bad3c899a1b7034062fac0d6eff4dbc
|
[
"MIT"
] | null | null | null |
Adafruit/Adafruit_I2C.py
|
jelly/prometheus-bmp180-exporter
|
1ff0dd880bad3c899a1b7034062fac0d6eff4dbc
|
[
"MIT"
] | null | null | null |
Adafruit/Adafruit_I2C.py
|
jelly/prometheus-bmp180-exporter
|
1ff0dd880bad3c899a1b7034062fac0d6eff4dbc
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
import re
import smbus
# ===========================================================================
# Adafruit_I2C Class
# ===========================================================================
class Adafruit_I2C(object):
@staticmethod
def getPiRevision():
"Gets the version number of the Raspberry Pi board"
# Revision list available at: http://elinux.org/RPi_HardwareHistory#Board_Revision_History
try:
with open('/proc/cpuinfo', 'r') as infile:
for line in infile:
                    # Match a line of the form "Revision : 0002" while ignoring extra
                    # info in front of the revision (like 1000 when the Pi was over-volted).
                    match = re.match(r'Revision\s+:\s+.*(\w{4})$', line)
if match and match.group(1) in ['0000', '0002', '0003']:
# Return revision 1 if revision ends with 0000, 0002 or 0003.
return 1
elif match:
# Assume revision 2 if revision ends with any other 4 chars.
return 2
# Couldn't find the revision, assume revision 0 like older code for compatibility.
return 0
        except Exception:
return 0
@staticmethod
def getPiI2CBusNumber():
# Gets the I2C bus number /dev/i2c#
return 1 if Adafruit_I2C.getPiRevision() > 1 else 0
def __init__(self, address, busnum=-1, debug=False):
self.address = address
# By default, the correct I2C bus is auto-detected using /proc/cpuinfo
# Alternatively, you can hard-code the bus version below:
# self.bus = smbus.SMBus(0); # Force I2C0 (early 256MB Pi's)
# self.bus = smbus.SMBus(1); # Force I2C1 (512MB Pi's)
self.bus = smbus.SMBus(busnum if busnum >= 0 else Adafruit_I2C.getPiI2CBusNumber())
self.debug = debug
def reverseByteOrder(self, data):
"Reverses the byte order of an int (16-bit) or long (32-bit) value"
# Courtesy Vishal Sapre
byteCount = len(hex(data)[2:].replace('L','')[::2])
val = 0
for i in range(byteCount):
val = (val << 8) | (data & 0xff)
data >>= 8
return val
def errMsg(self):
print("Error accessing 0x%02X: Check your I2C address" % self.address)
return -1
def write8(self, reg, value):
"Writes an 8-bit value to the specified register/address"
try:
self.bus.write_byte_data(self.address, reg, value)
if self.debug:
print("I2C: Wrote 0x%02X to register 0x%02X" % (value, reg))
except IOError as err:
return self.errMsg()
def write16(self, reg, value):
"Writes a 16-bit value to the specified register/address pair"
try:
self.bus.write_word_data(self.address, reg, value)
if self.debug:
print(("I2C: Wrote 0x%02X to register pair 0x%02X,0x%02X" %
(value, reg, reg+1)))
except IOError as err:
return self.errMsg()
def writeRaw8(self, value):
"Writes an 8-bit value on the bus"
try:
self.bus.write_byte(self.address, value)
if self.debug:
print("I2C: Wrote 0x%02X" % value)
except IOError as err:
return self.errMsg()
def writeList(self, reg, list):
"Writes an array of bytes using I2C format"
try:
if self.debug:
print("I2C: Writing list to register 0x%02X:" % reg)
print(list)
self.bus.write_i2c_block_data(self.address, reg, list)
except IOError as err:
return self.errMsg()
def readList(self, reg, length):
"Read a list of bytes from the I2C device"
try:
results = self.bus.read_i2c_block_data(self.address, reg, length)
if self.debug:
print(("I2C: Device 0x%02X returned the following from reg 0x%02X" %
(self.address, reg)))
print(results)
return results
except IOError as err:
return self.errMsg()
def readU8(self, reg):
"Read an unsigned byte from the I2C device"
try:
result = self.bus.read_byte_data(self.address, reg)
if self.debug:
print(("I2C: Device 0x%02X returned 0x%02X from reg 0x%02X" %
(self.address, result & 0xFF, reg)))
return result
except IOError as err:
return self.errMsg()
def readS8(self, reg):
"Reads a signed byte from the I2C device"
try:
result = self.bus.read_byte_data(self.address, reg)
if result > 127: result -= 256
if self.debug:
print(("I2C: Device 0x%02X returned 0x%02X from reg 0x%02X" %
(self.address, result & 0xFF, reg)))
return result
except IOError as err:
return self.errMsg()
def readU16(self, reg, little_endian=True):
"Reads an unsigned 16-bit value from the I2C device"
try:
result = self.bus.read_word_data(self.address,reg)
# Swap bytes if using big endian because read_word_data assumes little
# endian on ARM (little endian) systems.
if not little_endian:
result = ((result << 8) & 0xFF00) + (result >> 8)
if (self.debug):
print("I2C: Device 0x%02X returned 0x%04X from reg 0x%02X" % (self.address, result & 0xFFFF, reg))
return result
except IOError as err:
return self.errMsg()
def readS16(self, reg, little_endian=True):
"Reads a signed 16-bit value from the I2C device"
try:
result = self.readU16(reg,little_endian)
if result > 32767: result -= 65536
return result
except IOError as err:
return self.errMsg()
if __name__ == '__main__':
try:
bus = Adafruit_I2C(address=0)
print("Default I2C bus is accessible")
    except Exception:
print("Error accessing default I2C bus")
| 34.148148
| 106
| 0.61804
|
7b55c5d8efe63739152a27914263b7f0ad76c5f2
| 5,573
|
py
|
Python
|
electrum/gui/qt/request_list.py
|
sherlockcoin/electrum
|
fa9e4cf5ed15ae44331a70954e69bc677c529357
|
[
"MIT"
] | 1
|
2020-03-28T04:25:54.000Z
|
2020-03-28T04:25:54.000Z
|
electrum/gui/qt/request_list.py
|
Matthelonianxl/electrum-1
|
1f14894c43ca4193fb07c202f22e421365c50d2d
|
[
"MIT"
] | 1
|
2021-11-15T17:47:57.000Z
|
2021-11-15T17:47:57.000Z
|
electrum/gui/qt/request_list.py
|
Matthelonianxl/electrum-1
|
1f14894c43ca4193fb07c202f22e421365c50d2d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2015 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from electrum.i18n import _
from electrum.util import format_time, age
from electrum.plugin import run_hook
from electrum.paymentrequest import PR_UNKNOWN
from PyQt5.QtGui import *
from PyQt5.QtCore import *
from PyQt5.QtWidgets import QTreeWidgetItem, QMenu
from .util import MyTreeWidget, pr_tooltips, pr_icons
class RequestList(MyTreeWidget):
    filter_columns = [0, 1, 2, 3, 4]  # Date, Address, '', Description, Amount
def __init__(self, parent):
MyTreeWidget.__init__(self, parent, self.create_menu, [_('Date'), _('Address'), '', _('Description'), _('Amount'), _('Status')], 3)
self.currentItemChanged.connect(self.item_changed)
self.itemClicked.connect(self.item_changed)
self.setSortingEnabled(True)
self.setColumnWidth(0, 180)
self.hideColumn(1)
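        # column 1 (the address) is hidden from view, but it still acts as
        # the key that item_changed() and create_menu() use to look up requests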
def item_changed(self, item):
if item is None:
return
if not item.isSelected():
return
addr = str(item.text(1))
req = self.wallet.receive_requests.get(addr)
if req is None:
self.update()
return
expires = age(req['time'] + req['exp']) if req.get('exp') else _('Never')
amount = req['amount']
message = self.wallet.labels.get(addr, '')
self.parent.receive_address_e.setText(addr)
self.parent.receive_message_e.setText(message)
self.parent.receive_amount_e.setAmount(amount)
self.parent.expires_combo.hide()
self.parent.expires_label.show()
self.parent.expires_label.setText(expires)
self.parent.new_request_button.setEnabled(True)
def on_update(self):
self.wallet = self.parent.wallet
# hide receive tab if no receive requests available
b = len(self.wallet.receive_requests) > 0
self.setVisible(b)
self.parent.receive_requests_label.setVisible(b)
if not b:
self.parent.expires_label.hide()
self.parent.expires_combo.show()
# update the receive address if necessary
current_address = self.parent.receive_address_e.text()
domain = self.wallet.get_receiving_addresses()
addr = self.wallet.get_unused_address()
if not current_address in domain and addr:
self.parent.set_receive_address(addr)
self.parent.new_request_button.setEnabled(addr != current_address)
# clear the list and fill it again
self.clear()
for req in self.wallet.get_sorted_requests(self.config):
address = req['address']
if address not in domain:
continue
timestamp = req.get('time', 0)
amount = req.get('amount')
expiration = req.get('exp', None)
message = req.get('memo', '')
date = format_time(timestamp)
status = req.get('status')
signature = req.get('sig')
requestor = req.get('name', '')
amount_str = self.parent.format_amount(amount) if amount else ""
item = QTreeWidgetItem([date, address, '', message, amount_str, pr_tooltips.get(status,'')])
if signature is not None:
item.setIcon(2, self.icon_cache.get(":icons/seal.png"))
item.setToolTip(2, 'signed by '+ requestor)
            if status != PR_UNKNOWN:
                item.setIcon(5, self.icon_cache.get(pr_icons.get(status)))
self.addTopLevelItem(item)
def create_menu(self, position):
item = self.itemAt(position)
if not item:
return
addr = str(item.text(1))
req = self.wallet.receive_requests.get(addr)
if req is None:
self.update()
return
column = self.currentColumn()
column_title = self.headerItem().text(column)
column_data = item.text(column)
menu = QMenu(self)
menu.addAction(_("Copy {}").format(column_title), lambda: self.parent.app.clipboard().setText(column_data))
menu.addAction(_("Copy URI"), lambda: self.parent.view_and_paste('URI', '', self.parent.get_request_URI(addr)))
menu.addAction(_("Save as BIP70 file"), lambda: self.parent.export_payment_request(addr))
menu.addAction(_("Delete"), lambda: self.parent.delete_payment_request(addr))
run_hook('receive_list_menu', menu, addr)
menu.exec_(self.viewport().mapToGlobal(position))
| 42.869231
| 139
| 0.662121
|
3168018eebe9a0d36e2c9c94870ff2c454a60dcc
| 1,781
|
py
|
Python
|
src/api/datamanage/utils/meta_sync/client.py
|
Chromico/bk-base
|
be822d9bbee544a958bed4831348185a75604791
|
[
"MIT"
] | 84
|
2021-06-30T06:20:23.000Z
|
2022-03-22T03:05:49.000Z
|
src/api/datamanage/utils/meta_sync/client.py
|
Chromico/bk-base
|
be822d9bbee544a958bed4831348185a75604791
|
[
"MIT"
] | 7
|
2021-06-30T06:21:16.000Z
|
2022-03-29T07:36:13.000Z
|
src/api/datamanage/utils/meta_sync/client.py
|
Chromico/bk-base
|
be822d9bbee544a958bed4831348185a75604791
|
[
"MIT"
] | 40
|
2021-06-30T06:21:26.000Z
|
2022-03-29T12:42:26.000Z
|
# coding=utf-8
"""
Tencent is pleased to support the open source community by making BK-BASE 蓝鲸基础平台 available.
Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
BK-BASE 蓝鲸基础平台 is licensed under the MIT License.
License for BK-BASE 蓝鲸基础平台:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from metadata_client import MetadataClient, DEFAULT_SETTINGS
from datamanage.pizza_settings import ZK_ADDR, META_ACCESS_RPC_ENDPOINT
def metadata_client():
# meta sync conf
settings = DEFAULT_SETTINGS.copy()
settings.ZK_ADDR = ZK_ADDR
settings.METADATA_HA = False
settings.META_ACCESS_RPC_ENDPOINT = META_ACCESS_RPC_ENDPOINT
    return MetadataClient(settings)
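# A usage sketch (assumed, not part of the original module):
#   client = metadata_client()  # ZK/RPC endpoints come from pizza_settings
#   # `client` is then ready to talk to the metadata access RPC service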
| 55.65625
| 111
| 0.769792
|
b41e90985340897521f2a06bb74e14a3e4f5e259
| 4,444
|
py
|
Python
|
Week 5/Object-oriented Programming/Defining New Classes/topic.py
|
chanchanchong/Crash-Course-On-Python
|
ce98bdd5c5355a582c76b864dae1c5eecdf6ea44
|
[
"MIT"
] | null | null | null |
Week 5/Object-oriented Programming/Defining New Classes/topic.py
|
chanchanchong/Crash-Course-On-Python
|
ce98bdd5c5355a582c76b864dae1c5eecdf6ea44
|
[
"MIT"
] | null | null | null |
Week 5/Object-oriented Programming/Defining New Classes/topic.py
|
chanchanchong/Crash-Course-On-Python
|
ce98bdd5c5355a582c76b864dae1c5eecdf6ea44
|
[
"MIT"
] | null | null | null |
class Apple:
pass
class Apple:
color = ""
flavor = ""
jonagold = Apple()
jonagold.color = "red"
jonagold.flavor = "sweet"
print(jonagold.flavor)
print(jonagold.color)
golden = Apple()
golden.color = "Yellow"
golden.flavor = "Soft"
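# The notes below mention that dot notation can chain into a method call on
# an attribute; a tiny demo of that (not part of the original lesson code):
print(jonagold.color.upper())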
# We called out earlier that the point of object oriented programming is to
# help define a real-world concept in a way that the computer understands.
# Defining a real-world concept and code can be tricky. So let's look at how we
# might go about representing a concept in Python code. We'll take it step-by-
# step and keep it simple. Let's take our apple example from earlier. We could
# use this code to define a basic Apple class. Class Apple: pass. Sure, it doesn't
# look like much but with these two lines we've defined our first-class. Let's
# check out the syntax. In Python, we use the class reserved keyword to tell
# the computer that we're starting a new class. We follow this with the name
# of the class and a colon. The Python style guidelines recommend that class
# names should start with a capital letter. So we'll be using that convention. In
# this case, our class is called Apple. Class definitions follow the same pattern
# of other blocks we've seen before like functions, loops, or conditional
# branches. After the line with the class definition comes the body of the class,
# which is indented to the right. In this case, we haven't added anything to the
# body yet, so we use the pass keyword, to show the body is empty. We
# can also use the same keyword as a placeholder in any empty Python block.
# So how might we expand our definition of the Apple class? Well, it would
# probably have the same attributes that represent the information we want
# to associate with an apple like color and flavor. We can add that information
# like this. Class Apple: color, we'll set that to an empty string. Same with
# flavor. We'll set that to an empty string for now.
# So here we're defining two attributes: color and flavor. We define them as
# strings because that's what we expect these attributes to be. At the
# moment, they're empty strings, since we don't know what values these
# attributes will have. See how we don't need the pass keyword anymore now
# that we've got an actual body for the class. All right. Now that we've got an
# Apple class and some attributes, let see our Apple in action.
# Here, we're creating a new instance of our Apple class and assigning it to a
# variable called jonagold. Check out the syntax. To create a new instance of
# any class, we call the name of the class as if it were a function. Now that
# we've got our shiny new apple object, let's set the values of the attributes.
# All right. We've just set the color and the flavor as string values. To check
# that it worked, let's try retrieving them both and printing them to the
# screen.
# Print(jonagold.color). Print(jonagold.flavor). The syntax used to access the
# attributes is called dot notation because of the dot used in the expression.
# Dot notation lets you access any of the abilities that the object might have,
# called methods or information that it might store called attributes, like
# flavor. The attributes and methods of some objects can be other objects and
# can have attributes and methods of their own. For example, we could use
# the upper method to turn the string of the color attribute to uppercase. So
# print(jonagold.color.upper()).
# So far we've created one instance of the Apple class and set its attributes
# and checked that they are now correctly set. Now, we could create a new
# instance of the Apple class with different attributes. Golden equals Apple.
# Golden.color, we'll set that to yellow and golden.flavor equals soft. Both
# golden and jonagold are instances of the Apple class. They have the same
# attributes, color and flavor. But those attributes have different values.
# Congrats. You've learned how to create your own classes. Let's check that
# we've got all this down with a quick quiz. After that, we're going to learn
# how to define a new method for a class.
# Want to give this a go? Fill in the blanks in the code to make it print a poem.
class Flower:
color = "unknown"
rose = Flower()
rose.color = "red"
violet = Flower()
violet.color = "violet"
this_pun_is_for_you = "This poem is for you"
print("Roses are {}".format(rose.color))
print("violets are {}".format(violet.color))
print(this_pun_is_for_you)
| 48.835165
| 82
| 0.756076
|
c1bbb3a71e0fd4439f3afc630e5651a94be3988f
| 24,541
|
py
|
Python
|
conans/test/integration/toolchains/gnu/test_pkgconfigdeps.py
|
Mu-L/conan
|
7c24ec4bbd6e8c16cdcd879403aae742689bc36a
|
[
"MIT"
] | 1
|
2019-11-04T17:23:09.000Z
|
2019-11-04T17:23:09.000Z
|
conans/test/integration/toolchains/gnu/test_pkgconfigdeps.py
|
Mu-L/conan
|
7c24ec4bbd6e8c16cdcd879403aae742689bc36a
|
[
"MIT"
] | null | null | null |
conans/test/integration/toolchains/gnu/test_pkgconfigdeps.py
|
Mu-L/conan
|
7c24ec4bbd6e8c16cdcd879403aae742689bc36a
|
[
"MIT"
] | null | null | null |
import glob
import os
import textwrap
from conans.test.assets.genconanfile import GenConanfile
from conans.test.utils.tools import TestClient
from conans.util.files import load
def get_requires_from_content(content):
for line in content.splitlines():
if "Requires:" in line:
return line
return ""
def test_pkg_config_dirs():
# https://github.com/conan-io/conan/issues/2756
conanfile = textwrap.dedent("""
import os
from conan import ConanFile
class PkgConfigConan(ConanFile):
name = "MyLib"
version = "0.1"
def package_info(self):
self.cpp_info.frameworkdirs = []
self.cpp_info.filter_empty = False
libname = "mylib"
                fake_dir = os.path.join("/", "my_absolute_path", "fake")
include_dir = os.path.join(fake_dir, libname, "include")
lib_dir = os.path.join(fake_dir, libname, "lib")
lib_dir2 = os.path.join(self.package_folder, "lib2")
self.cpp_info.includedirs = [include_dir]
self.cpp_info.libdirs = [lib_dir, lib_dir2]
""")
client = TestClient()
client.save({"conanfile.py": conanfile})
client.run("create .")
client.run("install MyLib/0.1@ -g PkgConfigDeps")
pc_path = os.path.join(client.current_folder, "MyLib.pc")
assert os.path.exists(pc_path) is True
pc_content = load(pc_path)
assert 'Name: MyLib' in pc_content
assert 'Description: Conan package: MyLib' in pc_content
assert 'Version: 0.1' in pc_content
assert 'Libs: -L"${libdir1}" -L"${libdir2}"' in pc_content
assert 'Cflags: -I"${includedir1}"' in pc_content
def assert_is_abs(path):
assert os.path.isabs(path) is True
for line in pc_content.splitlines():
if line.startswith("includedir1="):
assert_is_abs(line[len("includedir1="):])
assert line.endswith("include")
elif line.startswith("libdir1="):
assert_is_abs(line[len("libdir1="):])
assert line.endswith("lib")
elif line.startswith("libdir2="):
assert "${prefix}/lib2" in line
def test_empty_dirs():
# Adding in package_info all the empty directories
conanfile = textwrap.dedent("""
import os
from conan import ConanFile
class PkgConfigConan(ConanFile):
name = "MyLib"
version = "0.1"
def package_info(self):
self.cpp_info.includedirs = []
self.cpp_info.libdirs = []
self.cpp_info.bindirs = []
self.cpp_info.libs = []
self.cpp_info.frameworkdirs = []
""")
client = TestClient()
client.save({"conanfile.py": conanfile})
client.run("create .")
client.run("install MyLib/0.1@ -g PkgConfigDeps")
pc_path = os.path.join(client.current_folder, "MyLib.pc")
assert os.path.exists(pc_path) is True
pc_content = load(pc_path)
expected = textwrap.dedent("""
Name: MyLib
Description: Conan package: MyLib
Version: 0.1
Libs:%s
Cflags: """ % " ") # ugly hack for trailing whitespace removed by IDEs
assert "\n".join(pc_content.splitlines()[1:]) == expected
def test_system_libs():
conanfile = textwrap.dedent("""
from conan import ConanFile
from conans.tools import save
import os
class PkgConfigConan(ConanFile):
name = "MyLib"
version = "0.1"
def package(self):
save(os.path.join(self.package_folder, "lib", "file"), "")
def package_info(self):
self.cpp_info.libs = ["mylib1", "mylib2"]
self.cpp_info.system_libs = ["system_lib1", "system_lib2"]
""")
client = TestClient()
client.save({"conanfile.py": conanfile})
client.run("create .")
client.run("install MyLib/0.1@ -g PkgConfigDeps")
pc_content = client.load("MyLib.pc")
assert 'Libs: -L"${libdir1}" -lmylib1 -lmylib2 -lsystem_lib1 -lsystem_lib2' in pc_content
def test_multiple_include():
# https://github.com/conan-io/conan/issues/7056
conanfile = textwrap.dedent("""
from conan import ConanFile
from conans.tools import save
import os
class PkgConfigConan(ConanFile):
def package(self):
for p in ["inc1", "inc2", "inc3/foo", "lib1", "lib2"]:
save(os.path.join(self.package_folder, p, "file"), "")
def package_info(self):
self.cpp_info.includedirs = ["inc1", "inc2", "inc3/foo"]
self.cpp_info.libdirs = ["lib1", "lib2"]
""")
client = TestClient()
client.save({"conanfile.py": conanfile})
client.run("create . pkg/0.1@")
client.run("install pkg/0.1@ -g PkgConfigDeps")
pc_content = client.load("pkg.pc")
assert "includedir1=${prefix}/inc1" in pc_content
assert "includedir2=${prefix}/inc2" in pc_content
assert "includedir3=${prefix}/inc3/foo" in pc_content
assert "libdir1=${prefix}/lib1" in pc_content
assert "libdir2=${prefix}/lib2" in pc_content
assert 'Libs: -L"${libdir1}" -L"${libdir2}"' in pc_content
assert 'Cflags: -I"${includedir1}" -I"${includedir2}" -I"${includedir3}"' in pc_content
def test_custom_content():
# https://github.com/conan-io/conan/issues/7661
conanfile = textwrap.dedent("""
from conan import ConanFile
from conans.tools import save
import os
import textwrap
class PkgConfigConan(ConanFile):
def package(self):
save(os.path.join(self.package_folder, "include" ,"file"), "")
save(os.path.join(self.package_folder, "lib" ,"file"), "")
def package_info(self):
custom_content = textwrap.dedent(\"""
datadir=${prefix}/share
schemasdir=${datadir}/mylib/schemas
bindir=${prefix}/bin
\""")
self.cpp_info.set_property("pkg_config_custom_content", custom_content)
self.cpp_info.includedirs = ["include"]
self.cpp_info.libdirs = ["lib"]
""")
client = TestClient()
client.save({"conanfile.py": conanfile})
client.run("create . pkg/0.1@")
client.run("install pkg/0.1@ -g PkgConfigDeps")
pc_content = client.load("pkg.pc")
assert "libdir1=${prefix}/lib" in pc_content
assert "datadir=${prefix}/share" in pc_content
assert "schemasdir=${datadir}/mylib/schemas" in pc_content
assert "bindir=${prefix}/bin" in pc_content
assert "Name: pkg" in pc_content
def test_custom_content_and_version_components():
conanfile = textwrap.dedent("""
from conan import ConanFile
from conans.tools import save
import os
import textwrap
class PkgConfigConan(ConanFile):
def package_info(self):
self.cpp_info.components["mycomponent"].set_property("pkg_config_custom_content",
"componentdir=${prefix}/mydir")
self.cpp_info.components["mycomponent"].set_property("component_version",
"19.8.199")
""")
client = TestClient()
client.save({"conanfile.py": conanfile})
client.run("create . pkg/0.1@")
client.run("install pkg/0.1@ -g PkgConfigDeps")
pc_content = client.load("pkg-mycomponent.pc")
assert "componentdir=${prefix}/mydir" in pc_content
assert "Version: 19.8.199" in pc_content
def test_pkg_with_public_deps_and_component_requires():
"""
Testing a complex structure like:
* first/0.1
- Global pkg_config_name == "myfirstlib"
- Components: "cmp1"
* other/0.1
* second/0.1
- Requires: "first/0.1"
- Components: "mycomponent", "myfirstcomp"
+ "mycomponent" requires "first::cmp1"
+ "myfirstcomp" requires "mycomponent"
* third/0.1
- Requires: "second/0.1", "other/0.1"
Expected file structure after running PkgConfigDeps as generator:
- other.pc
- myfirstlib-cmp1.pc
- myfirstlib.pc
- second-mycomponent.pc
- second-myfirstcomp.pc
- second.pc
- third.pc
"""
client = TestClient()
conanfile = textwrap.dedent("""
from conan import ConanFile
class Recipe(ConanFile):
def package_info(self):
self.cpp_info.set_property("pkg_config_name", "myfirstlib")
self.cpp_info.components["cmp1"].libs = ["libcmp1"]
""")
client.save({"conanfile.py": conanfile})
client.run("create . first/0.1@")
client.save({"conanfile.py": GenConanfile("other", "0.1").with_package_file("file.h", "0.1")})
client.run("create .")
conanfile = textwrap.dedent("""
from conan import ConanFile
class PkgConfigConan(ConanFile):
requires = "first/0.1"
def package_info(self):
self.cpp_info.components["mycomponent"].requires.append("first::cmp1")
self.cpp_info.components["myfirstcomp"].requires.append("mycomponent")
""")
client.save({"conanfile.py": conanfile}, clean_first=True)
client.run("create . second/0.1@")
client.save({"conanfile.py": GenConanfile("third", "0.1").with_package_file("file.h", "0.1")
.with_require("second/0.1")
.with_require("other/0.1")},
clean_first=True)
client.run("create .")
client2 = TestClient(cache_folder=client.cache_folder)
conanfile = textwrap.dedent("""
[requires]
third/0.1
[generators]
PkgConfigDeps
""")
client2.save({"conanfile.txt": conanfile})
client2.run("install .")
pc_content = client2.load("third.pc")
# Originally posted: https://github.com/conan-io/conan/issues/9939
assert "Requires: second other" == get_requires_from_content(pc_content)
pc_content = client2.load("second.pc")
assert "Requires: second-mycomponent second-myfirstcomp" == get_requires_from_content(pc_content)
pc_content = client2.load("second-mycomponent.pc")
assert "Requires: myfirstlib-cmp1" == get_requires_from_content(pc_content)
pc_content = client2.load("second-myfirstcomp.pc")
assert "Requires: second-mycomponent" == get_requires_from_content(pc_content)
pc_content = client2.load("myfirstlib.pc")
assert "Requires: myfirstlib-cmp1" == get_requires_from_content(pc_content)
pc_content = client2.load("other.pc")
assert "" == get_requires_from_content(pc_content)
def test_pkg_with_public_deps_and_component_requires_2():
"""
Testing another complex structure like:
* other/0.1
- Global pkg_config_name == "fancy_name"
- Components: "cmp1", "cmp2", "cmp3"
+ "cmp1" pkg_config_name == "component1" (it shouldn't be affected by "fancy_name")
+ "cmp3" pkg_config_name == "component3" (it shouldn't be affected by "fancy_name")
+ "cmp3" requires "cmp1"
* pkg/0.1
- Requires: "other/0.1" -> "other::cmp1"
Expected file structure after running PkgConfigDeps as generator:
- component1.pc
- component3.pc
- other-cmp2.pc
- other.pc
- pkg.pc
"""
client = TestClient()
conanfile = textwrap.dedent("""
from conan import ConanFile
class Recipe(ConanFile):
def package_info(self):
self.cpp_info.set_property("pkg_config_name", "fancy_name")
self.cpp_info.components["cmp1"].libs = ["other_cmp1"]
self.cpp_info.components["cmp1"].set_property("pkg_config_name", "component1")
self.cpp_info.components["cmp2"].libs = ["other_cmp2"]
self.cpp_info.components["cmp3"].requires.append("cmp1")
self.cpp_info.components["cmp3"].set_property("pkg_config_name", "component3")
""")
client.save({"conanfile.py": conanfile})
client.run("create . other/1.0@")
conanfile = textwrap.dedent("""
from conan import ConanFile
class PkgConfigConan(ConanFile):
requires = "other/1.0"
def package_info(self):
self.cpp_info.requires = ["other::cmp1"]
""")
client.save({"conanfile.py": conanfile})
client.run("create . pkg/0.1@")
client2 = TestClient(cache_folder=client.cache_folder)
conanfile = textwrap.dedent("""
[requires]
pkg/0.1
[generators]
PkgConfigDeps
""")
client2.save({"conanfile.txt": conanfile})
client2.run("install .")
pc_content = client2.load("pkg.pc")
assert "Requires: component1" == get_requires_from_content(pc_content)
pc_content = client2.load("fancy_name.pc")
assert "Requires: component1 fancy_name-cmp2 component3" == get_requires_from_content(pc_content)
assert client2.load("component1.pc")
assert client2.load("fancy_name-cmp2.pc")
pc_content = client2.load("component3.pc")
assert "Requires: component1" == get_requires_from_content(pc_content)
def test_pkg_config_name_full_aliases():
"""
Testing a simpler structure but paying more attention into several aliases.
Expected file structure after running PkgConfigDeps as generator:
- compo1.pc
- compo1_alias.pc
- pkg_alias1.pc
- pkg_alias2.pc
- pkg_other_name.pc
- second-mycomponent.pc
- second.pc
"""
client = TestClient()
conanfile = textwrap.dedent("""
from conan import ConanFile
class Recipe(ConanFile):
def package_info(self):
self.cpp_info.set_property("pkg_config_name", "pkg_other_name")
self.cpp_info.set_property("pkg_config_aliases", ["pkg_alias1", "pkg_alias2"])
self.cpp_info.components["cmp1"].libs = ["libcmp1"]
self.cpp_info.components["cmp1"].set_property("pkg_config_name", "compo1")
self.cpp_info.components["cmp1"].set_property("pkg_config_aliases", ["compo1_alias"])
""")
client.save({"conanfile.py": conanfile})
client.run("create . first/0.3@")
conanfile = textwrap.dedent("""
from conan import ConanFile
class PkgConfigConan(ConanFile):
requires = "first/0.3"
def package_info(self):
self.cpp_info.components["mycomponent"].requires.append("first::cmp1")
""")
client.save({"conanfile.py": conanfile}, clean_first=True)
client.run("create . second/0.2@")
conanfile = textwrap.dedent("""
[requires]
second/0.2
[generators]
PkgConfigDeps
""")
client.save({"conanfile.txt": conanfile}, clean_first=True)
client.run("install .")
pc_content = client.load("compo1.pc")
assert "Description: Conan component: pkg_other_name-compo1" in pc_content
assert "Requires" not in pc_content
pc_content = client.load("compo1_alias.pc")
content = textwrap.dedent("""\
Name: compo1_alias
Description: Alias compo1_alias for compo1
Version: 0.3
Requires: compo1
""")
assert content == pc_content
pc_content = client.load("pkg_other_name.pc")
assert "Description: Conan package: pkg_other_name" in pc_content
assert "Requires: compo1" in pc_content
pc_content = client.load("pkg_alias1.pc")
content = textwrap.dedent("""\
Name: pkg_alias1
Description: Alias pkg_alias1 for pkg_other_name
Version: 0.3
Requires: pkg_other_name
""")
assert content == pc_content
pc_content = client.load("pkg_alias2.pc")
content = textwrap.dedent("""\
Name: pkg_alias2
Description: Alias pkg_alias2 for pkg_other_name
Version: 0.3
Requires: pkg_other_name
""")
assert content == pc_content
pc_content = client.load("second-mycomponent.pc")
assert "Requires: compo1" == get_requires_from_content(pc_content)
def test_duplicated_names_warnings():
"""
Testing some WARN messages if there are duplicated pkg_config_name/pkg_config_aliases defined
Scenario: consumer -> pkga/1.0 -> pkgb/1.0
Expected WARN cases:
- Duplicated aliases.
- Duplicated names, alias and component name
- Duplicated components names.
- Duplicated package and component name.
- Duplicated names between different dependencies.
"""
client = TestClient()
conanfile = textwrap.dedent("""
from conan import ConanFile
class Recipe(ConanFile):
def package_info(self):
self.cpp_info.set_property("pkg_config_name", "libpkg")
# Duplicated components
self.cpp_info.components["cmp1"].set_property("pkg_config_name", "component1")
self.cpp_info.components["cmp2"].set_property("pkg_config_name", "component1")
# Duplicated package and component name
self.cpp_info.components["cmp3"].set_property("pkg_config_name", "libpkg")
""")
client.save({"conanfile.py": conanfile})
client.run("create . pkgb/1.0@")
conanfile = textwrap.dedent("""
from conan import ConanFile
class PkgConfigConan(ConanFile):
requires = "pkgb/1.0"
def package_info(self):
# Duplicated name as pkgb
self.cpp_info.set_property("pkg_config_name", "libpkg")
self.cpp_info.components["cmp1"].requires.append("pkgb::cmp1")
self.cpp_info.components["cmp1"].set_property("pkg_config_name", "component1")
# Duplicated aliases
self.cpp_info.components["cmp2"].set_property("pkg_config_aliases", ["alias1"])
self.cpp_info.components["cmp3"].set_property("pkg_config_aliases", ["alias1"])
# Duplicated names, alias and component name
self.cpp_info.components["cmp2"].set_property("pkg_config_name", "libcmp")
self.cpp_info.components["cmp4"].set_property("pkg_config_aliases", ["libcmp"])
""")
client.save({"conanfile.py": conanfile}, clean_first=True)
client.run("create . pkga/1.0@")
conanfile = textwrap.dedent("""
[requires]
pkga/1.0
[generators]
PkgConfigDeps
""")
client.save({"conanfile.txt": conanfile}, clean_first=True)
client.run("install .")
output = client.out
# Duplicated aliases from pkga
assert "WARN: [pkga/1.0] The PC alias name alias1.pc already exists and it matches with " \
"another alias one" in output
# Duplicated names, alias and component name from pkga
# Issue related: https://github.com/conan-io/conan/issues/10341
assert "WARN: [pkga/1.0] The PC alias name libcmp.pc already exists and it matches with " \
"another package/component one" in output
# Duplicated components from pkgb
assert "WARN: [pkgb/1.0] The PC component name component1.pc already exists and it matches " \
"with another component one" in output
# Duplicated package and component name from pkgb
assert "WARN: [pkgb/1.0] The PC package name libpkg.pc already exists and it matches with " \
"another component one" in output
# Duplicated names between pkgb and pkga
assert "WARN: [pkgb/1.0] The PC file name component1.pc already exists and it matches with " \
"another name/alias declared in pkga/1.0 package" in output
assert "WARN: [pkgb/1.0] The PC file name libpkg.pc already exists and it matches with " \
"another name/alias declared in pkga/1.0 package" in output
pc_files = [os.path.basename(i) for i in glob.glob(os.path.join(client.current_folder, '*.pc'))]
pc_files.sort()
# Let's check all the PC file names created just in case
assert pc_files == ['alias1.pc', 'component1.pc', 'libcmp.pc', 'libpkg-cmp3.pc',
'libpkg-cmp4.pc', 'libpkg.pc']
def test_components_and_package_pc_creation_order():
"""
Testing if the root package PC file name matches with any of the components one, the first one
is not going to be created. Components have more priority than root package.
Issue related: https://github.com/conan-io/conan/issues/10341
"""
client = TestClient()
conanfile = textwrap.dedent("""
from conan import ConanFile
class PkgConfigConan(ConanFile):
def package_info(self):
self.cpp_info.set_property("pkg_config_name", "OpenCL")
self.cpp_info.components["_opencl-headers"].set_property("pkg_config_name", "OpenCL")
self.cpp_info.components["_opencl-other"].set_property("pkg_config_name", "OtherCL")
""")
client.save({"conanfile.py": conanfile})
client.run("create . opencl/1.0@")
conanfile = textwrap.dedent("""
from conan import ConanFile
class PkgConfigConan(ConanFile):
requires = "opencl/1.0"
def package_info(self):
self.cpp_info.components["comp"].set_property("pkg_config_name", "pkgb")
self.cpp_info.components["comp"].requires.append("opencl::_opencl-headers")
""")
client.save({"conanfile.py": conanfile}, clean_first=True)
client.run("create . pkgb/1.0@")
conanfile = textwrap.dedent("""
[requires]
pkgb/1.0
[generators]
PkgConfigDeps
""")
client.save({"conanfile.txt": conanfile}, clean_first=True)
client.run("install .")
pc_files = [os.path.basename(i) for i in glob.glob(os.path.join(client.current_folder, '*.pc'))]
pc_files.sort()
# Let's check all the PC file names created just in case
assert pc_files == ['OpenCL.pc', 'OtherCL.pc', 'pkgb.pc']
pc_content = client.load("OpenCL.pc")
assert "Name: OpenCL" in pc_content
assert "Description: Conan component: OpenCL" in pc_content
assert "Requires:" not in pc_content
pc_content = client.load("pkgb.pc")
assert "Requires: OpenCL" in get_requires_from_content(pc_content)
def test_pkgconfigdeps_with_test_requires():
"""
PkgConfigDeps has to create any test requires declared on the recipe.
Related issue: https://github.com/conan-io/conan/issues/11376
"""
client = TestClient()
with client.chdir("app"):
client.run("new app/1.0 -m cmake_lib")
# client.run("new cmake_lib -d name=app -d version=1.0")
client.run("create .")
with client.chdir("test"):
client.run("new test/1.0 -m cmake_lib")
# client.run("new cmake_lib -d name=test -d version=1.0")
client.run("create .")
# Create library having build and test requires
conanfile = textwrap.dedent(r'''
from conan import ConanFile
class HelloLib(ConanFile):
def build_requirements(self):
self.test_requires('app/1.0')
self.test_requires('test/1.0')
''')
client.save({"conanfile.py": conanfile}, clean_first=True)
client.run("install . -g PkgConfigDeps")
assert "Description: Conan package: test" in client.load("test.pc")
assert "Description: Conan package: app" in client.load("app.pc")
def test_with_editable_layout():
"""
https://github.com/conan-io/conan/issues/11435
"""
client = TestClient()
dep = textwrap.dedent("""
from conan import ConanFile
from conan.tools.files import save
class Dep(ConanFile):
name = "dep"
version = "0.1"
def layout(self):
self.cpp.source.includedirs = ["include"]
def package_info(self):
self.cpp_info.libs = ["mylib"]
""")
client.save({"dep/conanfile.py": dep,
"dep/include/header.h": "",
"pkg/conanfile.py": GenConanfile("pkg", "0.1").with_requires("dep/0.1")})
client.run("create dep")
client.run("editable add dep dep/0.1")
with client.chdir("pkg"):
client.run("install . -g PkgConfigDeps")
pc = client.load("dep.pc")
assert "Libs: -lmylib" in pc
assert 'includedir1=' in pc
assert 'Cflags: -I"${includedir1}"' in pc
| 37.239757
| 101
| 0.617905
|
ef0bcf8f72e58b6ac21a095e0eeabd6b9a415bce
| 10,934
|
py
|
Python
|
Fujitsu/benchmarks/resnet/implementations/mxnet/3rdparty/tvm/tests/python/unittest/test_hybrid_script.py
|
mengkai94/training_results_v0.6
|
43dc3e250f8da47b5f8833197d74cb8cf1004fc9
|
[
"Apache-2.0"
] | 64
|
2021-05-02T14:42:34.000Z
|
2021-05-06T01:35:03.000Z
|
tests/python/unittest/test_hybrid_script.py
|
clhne/tvm
|
d59320c764bd09474775e1b292f3c05c27743d24
|
[
"Apache-2.0"
] | 23
|
2019-07-29T05:21:52.000Z
|
2020-08-31T18:51:42.000Z
|
tests/python/unittest/test_hybrid_script.py
|
clhne/tvm
|
d59320c764bd09474775e1b292f3c05c27743d24
|
[
"Apache-2.0"
] | 51
|
2019-07-12T05:10:25.000Z
|
2021-07-28T16:19:06.000Z
|
import tvm, inspect, sys, traceback, numpy, nose
from tvm.hybrid import script
from tvm.hybrid.intrin import HYBRID_GLOBALS
@nose.tools.nottest
def run_and_check(func, args, outs, var_dict={}, target='llvm'):
def tvm_val_2_py_val(val):
val = tvm.ir_pass.Substitute(val, var_dict)
val = tvm.ir_pass.Simplify(val)
assert isinstance(val, (tvm.expr.IntImm, tvm.expr.UIntImm))
return val.value
ctx = tvm.context(target, 0)
emu_args = []
nd_args = []
to_check = []
for i in args:
if isinstance(i, tvm.tensor.Tensor):
shape = [tvm_val_2_py_val(j) for j in i.shape]
if i in outs:
emu_args.append(numpy.zeros(shape).astype(i.dtype))
nd_args.append(tvm.nd.array(emu_args[-1], ctx))
to_check.append((nd_args[-1], emu_args[-1]))
else:
emu_args.append(numpy.random.randn(*shape).astype(i.dtype))
nd_args.append(tvm.nd.array(emu_args[-1], ctx))
else:
assert isinstance(i, tvm.expr.Var)
emu_args.append(tvm_val_2_py_val(i))
nd_args.append(emu_args[-1])
func(*emu_args)
    lowered_func = tvm.lower(func(*args), args)
    module = tvm.build(lowered_func, target=target)
assert module
module(*nd_args)
for nd, np in to_check:
numpy.testing.assert_allclose(nd.asnumpy(), np, rtol=1e-5, atol=1e-5)
@script
def outer_product(n, m, a, b, c):
"""This is a simple outer product"""
for i in range(n):
for j in range(m):
c[i, j] = a[i] * b[j]
#Test global function
#Test bridge between frontend and backend
def test_outer_product():
n = tvm.var('n')
m = tvm.var('m')
a = tvm.placeholder((n, ), name='a')
b = tvm.placeholder((m, ), name='b')
c = tvm.placeholder((n, m), name='c')
ir = outer_product(n, m, a, b, c)
#Check for i in (0, n)
assert isinstance(ir, tvm.stmt.For)
assert ir.loop_var.name == 'i'
assert ir.min.value == 0
assert ir.extent.name == 'n'
ibody = ir.body
assert isinstance(ibody, tvm.stmt.For)
#Check for j in (0, m)
assert ibody.loop_var.name == 'j'
assert ibody.min.value == 0
assert ibody.extent.name == 'm'
#Check loop body
jbody = ibody.body
assert isinstance(jbody, tvm.stmt.Provide)
assert jbody.func.name == 'c'
assert len(jbody.args) == 2
assert jbody.args[0].name == 'i'
assert jbody.args[1].name == 'j'
assert isinstance(jbody.value, tvm.expr.Mul)
mul = jbody.value
assert isinstance(mul.a, tvm.expr.Call)
assert mul.a.name == 'a'
assert mul.b.name == 'b'
func = tvm.lower(ir, [n, m, a, b, c])
func = tvm.build(func)
run_and_check(outer_product, [n, m, a, b, c], [c], {n: 999, m: 1001})
for key, _ in HYBRID_GLOBALS.items():
assert key not in globals().keys()
assert key not in outer_product.__globals__.keys()
#Test local function
#Test allocation of local variable
def test_fanout():
@script
def fanout(n, a, b):
three = 3.0
for i in range(a.shape[0] - 3):
sigma = 0.0
for j in range(3):
sigma = sigma + a[i + j]
sigma = sigma / three
b[i] = sigma
n = tvm.var('n')
a = tvm.placeholder((n, ), 'float32', name='a')
b = tvm.placeholder((n-3, ), 'float32', name='b')
ir = fanout(n, a, b)
#Check for i in (0, n-3)
assert isinstance(ir, tvm.stmt.For)
assert ir.loop_var.name == 'i'
assert ir.min.value == 0
assert tvm.ir_pass.Equal(ir.extent, n - 3)
#Check loopbody
ibody = ir.body
assert isinstance(ibody, tvm.stmt.AttrStmt)
abody = ibody.body
assert isinstance(abody, tvm.stmt.Realize)
assert abody.bounds[0].min.value == 0
assert abody.bounds[0].extent.value == 1
assert abody.func.name == 'sigma'
#Check i loop body
rbody = abody.body
assert isinstance(rbody.first, tvm.stmt.Provide)
assert rbody.first.func.name == 'sigma'
assert len(rbody.first.args) == 1
assert rbody.first.args[0].value == 0
#Check fanout loop
jloop = rbody.rest.first
assert jloop.loop_var.name == 'j'
assert jloop.min.value == 0
assert jloop.extent.value == 3
jbody = jloop.body
assert isinstance(jbody, tvm.stmt.Provide)
assert len(jbody.args) == 1
assert jbody.args[0].value == 0
assert jbody.func.name == 'sigma'
assert isinstance(jbody.value, tvm.expr.Add)
value = jbody.value
assert isinstance(value.a, tvm.expr.Call)
assert value.a.name == 'sigma'
assert len(value.a.args) == 1
assert value.a.args[0].value == 0
assert value.b.name == 'a'
assert len(value.b.args) == 1
assert tvm.ir_pass.Equal(value.b.args[0], ir.loop_var + jloop.loop_var)
    divide = rbody.rest.rest.first
assert isinstance(divide, tvm.stmt.Provide)
assert len(divide.args) == 1
assert divide.args[0].value == 0
value = divide.value
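    # note: the division by `three` is lowered to a multiply by 1/3, hence
    # the Mul check here and the 1/3.0 comparison below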
assert isinstance(value, tvm.expr.Mul)
assert value.a.name == 'sigma'
assert len(value.a.args) == 1
assert value.a.args[0].value == 0
assert abs(value.b.value - (1 / 3.0)) < 1e-5
write = rbody.rest.rest.rest
assert isinstance(write, tvm.stmt.Provide)
assert write.func.name == 'b'
assert write.value.name == 'sigma'
assert len(write.value.args) == 1
assert write.value.args[0].value == 0
run_and_check(fanout, [n, a, b], [b], {n: 10})
@script
def failure():
for i in range(1, 100):
i = 0
def test_failure():
try:
tvm.hybrid.parse(failure, [])
except IOError as err:
assert sys.version_info[0] == 2
print('[Warning] Case test_failure is skipped by Python2 because "%s"' % str(err))
except Exception as err:
assert str(err) == 'You CAN NEVER overwrite a loop variable!'
def test_looptype():
@script
def looptype(a, b, c):
for i in parallel(8):
a[i] = i
for j in vectorize(8):
b[j] = j
for k in unroll(8):
c[k] = k
a = tvm.placeholder((8, ), name='a', dtype='int32')
b = tvm.placeholder((8, ), name='b', dtype='int32')
c = tvm.placeholder((8, ), name='c', dtype='int32')
ir = looptype(a, b, c)
iloop = ir.first
jloop = ir.rest.first
kloop = ir.rest.rest
assert iloop.for_type == tvm.stmt.For.Parallel
assert jloop.for_type == tvm.stmt.For.Vectorized
assert kloop.for_type == tvm.stmt.For.Unrolled
run_and_check(looptype, [a, b, c], [a, b, c])
def test_if():
@script
def if_then_else(a, b):
for i in range(10):
if i % 2 == 0:
a[i] = -1
else:
a[i] = 1
for i in unroll(10):
b[i] = -1 if i % 2 == 0 else 1
a = tvm.placeholder((10, ), dtype='int32', name='a')
b = tvm.placeholder((10, ), dtype='int32', name='b')
run_and_check(if_then_else, [a, b], [a, b])
def test_bind():
if not tvm.gpu(0).exist:
print('[Warning] No GPU found! Skip bind test!')
return
@script
def vec_add(a, b, c):
for tx in bind('threadIdx.x', 1000):
c[tx] = b[tx] + c[tx]
a = tvm.placeholder((1000, ), dtype='float32', name='a')
b = tvm.placeholder((1000, ), dtype='float32', name='b')
c = tvm.placeholder((1000, ), dtype='float32', name='c')
run_and_check(vec_add, [a, b, c], [c], target='cuda')
def test_math_intrin():
@script
def intrin_real(a):
a[0] = sqrt(a[0])
a[1] = log(a[1])
a[2] = exp(a[2])
a[3] = sigmoid(a[3])
a[4] = power(a[4], a[5])
a[5] = tanh(a[5])
a[6] = min(a[4], a[5])
a[7] = max(a[5], a[6])
a8 = tvm.placeholder((8, ), dtype='float32', name='a')
ir = intrin_real(a8)
func = tvm.build(tvm.lower(ir, [a8]))
assert func
a = numpy.arange(2, 10).astype('float32')
tvm_a = tvm.ndarray.array(a)
func(tvm_a)
intrin_real(a)
numpy.testing.assert_allclose(a, tvm_a.asnumpy(), rtol=1e-5)
@script
def intrin_int(a):
a[0] = popcount(a[0])
a1 = tvm.placeholder((1, ), dtype='int32')
ir = intrin_int(a1)
func = tvm.build(tvm.lower(ir, [a1]))
assert func
a = numpy.array([1234567890]).astype('int32')
tvm_a = tvm.ndarray.array(a)
intrin_int(a)
func(tvm_a)
assert tvm_a.asnumpy()[0] == a[0]
def test_non_zero():
@tvm.hybrid.script
def blur(a, b):
for i in range(2, 32):
for j in range(2, 32):
s = 0.0
for di in range(3):
for dj in range(3):
s = s + a[i-di, j-dj]
b[i-2, j-2] = s / 9.0
try:
a = tvm.placeholder((32, 32), 'float32', 'a')
b = tvm.placeholder((30, 30), 'float32', 'b')
run_and_check(blur, [a, b], [b])
except IOError as err:
assert sys.version_info[0] == 2
print('[Warning] Case test_non_zero is skipped by Python2 because "%s"' % str(err))
@tvm.hybrid.script
def triangle(a, b, c):
for i in range(10):
for j in range(i, 10):
c[i, j] = a[i] * b[j]
a = tvm.placeholder((10, ), dtype='float32', name='a')
b = tvm.placeholder((10, ), dtype='float32', name='b')
c = tvm.placeholder((10, 10), dtype='float32', name='c')
run_and_check(triangle, [a, b, c], [c])
def test_allocate():
@tvm.hybrid.script
def blur2d(a, b):
for i in range(30):
ha = allocate((3, 30), 'float32')
for j in range(3):
for k in range(30):
ha[j, k] = a[i+j, k] + a[i+j, k+1] + a[i+j, k+2]
for j in range(30):
b[i, j] = (ha[0, j] + ha[1, j] + ha[2, j]) / 9.0
a = tvm.placeholder((32, 32), 'float32', 'a')
b = tvm.placeholder((30, 30), 'float32', 'b')
run_and_check(blur2d, [a, b], [b])
if tvm.gpu().exist:
@tvm.hybrid.script
def share_vec_add(a, b, c):
shared = allocate((256, ), 'float32', 'shared')
for i in bind("threadIdx.x", 256):
shared[i] = a[i]
local = allocate((256, ), 'float32', 'local')
for i in bind("threadIdx.x", 256):
local[i] = b[i]
for i in bind("threadIdx.x", 256):
c[i] = shared[i] + local[i]
a = tvm.placeholder((256, ), dtype='float32', name='a')
b = tvm.placeholder((256, ), dtype='float32', name='b')
c = tvm.placeholder((256, ), dtype='float32', name='c')
run_and_check(share_vec_add, [a, b, c], [c], target='cuda')
else:
print('[Warning] No GPU found! Skip shared mem test!')
if __name__ == "__main__":
test_outer_product()
test_fanout()
test_failure()
test_looptype()
test_if()
test_bind()
test_math_intrin()
test_non_zero()
test_allocate()
| 30.974504
| 91
| 0.561551
|
413feb4744ad7b46716e5aaf004a45edf1adf88e
| 7,667
|
py
|
Python
|
src/simmate/calculators/vasp/tasks/_to_do/nudged_elastic_band.py
|
sionab/simmate
|
6dedea7310829aae425bf3393e7923e454a0129f
|
[
"BSD-3-Clause"
] | null | null | null |
src/simmate/calculators/vasp/tasks/_to_do/nudged_elastic_band.py
|
sionab/simmate
|
6dedea7310829aae425bf3393e7923e454a0129f
|
[
"BSD-3-Clause"
] | null | null | null |
src/simmate/calculators/vasp/tasks/_to_do/nudged_elastic_band.py
|
sionab/simmate
|
6dedea7310829aae425bf3393e7923e454a0129f
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
from pymatgen.analysis.transition_state import NEBAnalysis
from simmate.calculators.vasp.inputs.all import Incar, Poscar, Kpoints, Potcar
from simmate.calculators.vasp.tasks.base import VaspTask
from simmate.calculators.vasp.error_handlers.tetrahedron_mesh import TetrahedronMesh
from simmate.calculators.vasp.error_handlers.eddrmm import Eddrmm
# NOTE TO USER:
# This NEB task is very different from all other VASP tasks!
#
# The first big difference is that it takes a list of structures instead of
# just one structure. This means when you set "structure=..." that you should
# actually do "structure=[structure1, structure2, structure3, ...]". This would
# be clearer if we made this input variable named "structures" instead of just
# "structure", but this requires more reworking on Simmate's end, and we
# unfortunately haven't had time to fix it yet.
#
# The second big difference is that VASP uses a different folder setup when
# running these calculations. It has a series of folders named 00, 01, 02, ... N,
# where 00 is the starting image, N is the endpoint image, and 01 to (N-1) are
# the pathway images. Simmate handles this inside the task, but knowing this
# may be useful if you'd like to make your own variation of this class.
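#
# A hedged usage sketch (the call signature below is assumed, not taken
# from this file):
#
#   task = NudgedElasticBandTask()
#   task.run(structure=[start, image1, image2, end], directory="neb_run")
#
# would lay out folders 00/, 01/, 02/, 03/, one POSCAR per image.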
class NudgedElasticBandTask(VaspTask):
# The default settings to use for this calculation.
    # To tell VASP that we are doing an NEB calculation, we need to set the
    # IMAGES tag in the INCAR (set automatically in setup() below).
incar = dict(
# These settings are from MITRelaxSet
# https://github.com/materialsproject/pymatgen/blob/v2022.0.9/pymatgen/io/vasp/MPRelaxSet.yaml
ALGO="Normal", # TEMPORARY SWITCH FROM Fast
EDIFF=1.0e-05,
ENCUT=520,
# IBRION=2, --> overwritten by MITNEBSet below
ICHARG=1,
ISIF=3,
ISMEAR=-5,
ISPIN=2,
ISYM=0,
# LDAU --> These parameters are excluded for now.
LORBIT=11,
LREAL="auto",
LWAVE=False,
NELM=200,
NELMIN=6,
NSW=99, # !!! Changed to static energy for testing
PREC="Accurate",
SIGMA=0.05,
KSPACING=0.5, # --> This is VASP default and not the same as pymatgen
# These settings are from MITNEBSet
# https://github.com/materialsproject/pymatgen/blob/v2022.0.9/pymatgen/io/vasp/sets.py#L2376-L2491
# IMAGES=len(structures) - 2, --> set inside task
IBRION=1,
# ISYM=0, --> duplicate of setting above
LCHARG=False,
# LDAU=False, --> already the default value
# TODO: Allow IMAGES to be set like shown below.
# For this, we use "__auto" to let Simmate set this automatically by
# using the input structures given.
# IMAGES__auto=True,
)
# We will use the PBE functional with all default mappings
functional = "PBE"
# We also set up some error handlers that are commonly used with
error_handlers = [TetrahedronMesh()]
def _pre_checks(self, structure, directory):
# This function is used inside of this class's setup method (shown below),
# where we make sure the user has everything set up properly.
# The first common mistake is that the user didn't provide a list of
# structures as the input.
if type(structure) != list:
raise TypeError(
"This task requires multiple structures given as a list! "
"So your input should look like this..\n"
"structure=[structure_start, structure_image1, structure_image2, ..., structure_end]"
"\nWe apologize that this input can be confusing, and we'll "
" work to fix this in the future!"
)
# The next common mistake is to mislabel the number of images in the INCAR
# file.
# first, we check if the user set this.
nimages = self.incar.get("IMAGES")
if nimages:
# if so, we check that it was set correctly. It should be equal to
# the number of structures minus 2 (because we don't count the
# start and end images here.)
if nimages != (len(structure) - 2):
raise Exception(
"IMAGES looks to be improperly set! This value should not"
" include the start/end images -- so make sure you counted"
" properly. Alternatively, you also can remove this keyword"
" from your INCAR and Simmate will provide it automatically"
" for you."
)
# TODO: add a precheck that ensures the number of cores VASP is ran on
# is also divisible by the number of images. For example...
# "mpirun -n 16 vasp" will not work for IMAGES=3 because 16 is not
# divisible by 3. But this also may be better suited for an ErrorHandler
def setup(self, structure, directory):
# run some prechecks to make sure the user has everything set up properly.
self._pre_checks(structure, directory)
# Here, each image (start to end structures) is put inside of its own
# folder. We make those folders here, where they are named 00, 01, 02...N
# Also recall that "structure" is really a list of structures here.
for i, image in enumerate(structure):
            # first establish the folder name
# The zfill function converts numbers from "1" to "01" for us
foldername = os.path.join(directory, str(i).zfill(2))
# see if the folder exists, and if not, make it
if not os.path.exists(foldername):
os.mkdir(foldername)
# now write the poscar file inside the folder
Poscar.to_file(image, os.path.join(foldername, "POSCAR"))
# We also need to check if the user set IMAGES in the INCAR. If not,
# we set that for them here.
if not self.incar.get("IMAGES"):
self.incar["IMAGES"] = len(structure) - 2
# BUG: changing this class attribute may not be safe to do when this
        # task is used across multiple pathways with different image numbers.
# It may be better to make a separate incar dictionary that we then pass
# to Incar() below.
# write the incar file
Incar(**self.incar).to_file(os.path.join(directory, "INCAR"))
# if KSPACING is not provided AND kpoints is, write the KPOINTS file
if self.kpoints and ("KSPACING" not in self.incar):
Kpoints.to_file(
# We use the first image as all should give the same result
structure[0],
self.kpoints,
os.path.join(directory, "KPOINTS"),
)
# write the POTCAR file
Potcar.to_file_from_type(
# We use the first image as all should give the same result
structure[0].composition.elements,
self.functional,
os.path.join(directory, "POTCAR"),
self.potcar_mappings,
)
def workup(self, directory):
        # BUG: For now I assume the start/end image directories are located
# in the working directory. This bad assumption is made as I'm just quickly
# trying to get results for some labmates. In the future, I need to search
# a number of places for these directories.
neb_results = NEBAnalysis.from_dir(
directory,
relaxation_dirs=["start_image_relaxation", "end_image_relaxation"],
)
# plot the results
plot = neb_results.get_plot()
plot.savefig("NEB_plot.jpeg")
| 44.063218
| 106
| 0.635581
|
0e6432880e334538bfd90598f42c86b94f920b85
| 1,307
|
py
|
Python
|
digital_signature.py
|
julzhk/crypto_spike
|
435787aee5bc0d456a871e9e93fd952bc148090c
|
[
"Unlicense"
] | null | null | null |
digital_signature.py
|
julzhk/crypto_spike
|
435787aee5bc0d456a871e9e93fd952bc148090c
|
[
"Unlicense"
] | null | null | null |
digital_signature.py
|
julzhk/crypto_spike
|
435787aee5bc0d456a871e9e93fd952bc148090c
|
[
"Unlicense"
] | null | null | null |
# coding=utf-8
import hashlib
from random import randint
def generate_keys(keysize=256):
# (sk, pk) := generateKeys(keysize) The generateKeys method takes a key size and generates
# a key pair. The secret key sk is kept privately and used to sign messages. pk is the public
# verification key that you give to everybody. Anyone with this key can verify your signature.
hasher = hashlib.md5()
    rand = str(randint(0, 1000))
    hasher.update(rand.encode())
    hasher.update(b'public')
    pk = hasher.hexdigest()
    hasher.update(b'secret')
    sk = hasher.hexdigest()
    return sk, pk
def sign(sk, msg):
# sign(sk, message) The sign method takes a message, msg, and a secret key, sk, as
# input and outputs a signature for the msg under sk
hasher = hashlib.md5()
    hasher.update(sk.encode())
    hasher.update(msg.encode())
return hasher.hexdigest()
def is_valid(pk, message, signature):
    # isValid := verify(pk, message, sig) The verify method takes a message, a signature, and a
    # public key as input. It returns a boolean value, isValid, that will be true if sig is a valid
# signature for message under public key pk, and false otherwise.
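    # NOTE: this is a stub that accepts every signature. Because sign()
    # hashes the *secret* key together with the message, a holder of pk
    # alone cannot recompute the signature; genuine public verification
    # needs an asymmetric scheme (e.g. RSA or ECDSA).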
return True
sk, pk = generate_keys()
signature = sign(sk=sk, msg='hello')
print(is_valid(pk=pk, message='hello', signature=signature))
| 36.305556
| 102
| 0.693191
|
7da09144ab3c469f9e688175397a392c9d3bd1f8
| 767
|
py
|
Python
|
src/iminuit/color.py
|
MarcoGorelli/iminuit
|
496c74b7b43f502838375d7118eed04cdf17fab0
|
[
"MIT"
] | null | null | null |
src/iminuit/color.py
|
MarcoGorelli/iminuit
|
496c74b7b43f502838375d7118eed04cdf17fab0
|
[
"MIT"
] | null | null | null |
src/iminuit/color.py
|
MarcoGorelli/iminuit
|
496c74b7b43f502838375d7118eed04cdf17fab0
|
[
"MIT"
] | null | null | null |
__all__ = ["Gradient"]
class Gradient:
"""Color gradient."""
_steps = None
def __init__(self, *steps):
self._steps = steps
def __call__(self, v):
st = self._steps
z = 0.0
if v < st[0][0]:
z = 0.0
i = 0
elif v >= st[-1][0]:
z = 1.0
i = -2
else:
i = 0
for i in range(len(st) - 1):
if st[i][0] <= v < st[i + 1][0]:
break
            z = (v - st[i][0]) / (st[i + 1][0] - st[i][0])
az = 1.0 - z
a = st[i]
b = st[i + 1]
return (az * a[1] + z * b[1], az * a[2] + z * b[2], az * a[3] + z * b[3])
def rgb(self, v):
return "rgb(%.0f,%.0f,%.0f)" % self(v)
| 22.558824
| 81
| 0.352021
|
db8f55d221f411cae61f1e0a06d921abdf0f2d6e
| 53,600
|
py
|
Python
|
sympy/functions/special/bessel.py
|
Abhishek-IOT/sympy
|
e31c4cdedb5080325b3fd04f4b4826d9dab65b26
|
[
"BSD-3-Clause"
] | 2
|
2021-03-04T16:57:06.000Z
|
2021-08-11T01:42:29.000Z
|
sympy/functions/special/bessel.py
|
mmelotti/sympy
|
bea29026d27cc50c2e6a5501b6a70a9629ed3e18
|
[
"BSD-3-Clause"
] | 1
|
2020-04-22T12:45:26.000Z
|
2020-04-22T12:45:26.000Z
|
sympy/functions/special/bessel.py
|
mmelotti/sympy
|
bea29026d27cc50c2e6a5501b6a70a9629ed3e18
|
[
"BSD-3-Clause"
] | 3
|
2019-05-18T21:32:31.000Z
|
2019-07-26T11:05:46.000Z
|
from functools import wraps
from sympy import S, pi, I, Rational, Wild, cacheit, sympify
from sympy.core.function import Function, ArgumentIndexError
from sympy.core.power import Pow
from sympy.functions.combinatorial.factorials import factorial
from sympy.functions.elementary.trigonometric import sin, cos, csc, cot
from sympy.functions.elementary.complexes import Abs
from sympy.functions.elementary.miscellaneous import sqrt, root
from sympy.functions.elementary.complexes import re, im
from sympy.functions.special.gamma_functions import gamma
from sympy.functions.special.hyper import hyper
from sympy.polys.orthopolys import spherical_bessel_fn as fn
# TODO
# o Scorer functions G1 and G2
# o Asymptotic expansions
# These are possible, e.g. for fixed order, but since the bessel type
# functions are oscillatory they are not actually tractable at
# infinity, so this is not particularly useful right now.
# o Series Expansions for functions of the second kind about zero
# o Nicer series expansions.
# o More rewriting.
# o Add solvers to ode.py (or rather add solvers for the hypergeometric equation).
class BesselBase(Function):
"""
Abstract base class for Bessel-type functions.
This class is meant to reduce code duplication.
All Bessel-type functions can 1) be differentiated, with the derivatives
expressed in terms of similar functions, and 2) be rewritten in terms
of other Bessel-type functions.
Here, Bessel-type functions are assumed to have one complex parameter.
To use this base class, define class attributes ``_a`` and ``_b`` such that
    ``2*F_n' = -_a*F_{n+1} + _b*F_{n-1}``.
"""
@property
def order(self):
""" The order of the Bessel-type function. """
return self.args[0]
@property
def argument(self):
""" The argument of the Bessel-type function. """
return self.args[1]
@classmethod
def eval(cls, nu, z):
return
def fdiff(self, argindex=2):
if argindex != 2:
raise ArgumentIndexError(self, argindex)
return (self._b/2 * self.__class__(self.order - 1, self.argument) -
self._a/2 * self.__class__(self.order + 1, self.argument))
def _eval_conjugate(self):
z = self.argument
if z.is_extended_negative is False:
return self.__class__(self.order.conjugate(), z.conjugate())
def _eval_expand_func(self, **hints):
nu, z, f = self.order, self.argument, self.__class__
if nu.is_extended_real:
if (nu - 1).is_extended_positive:
return (-self._a*self._b*f(nu - 2, z)._eval_expand_func() +
2*self._a*(nu - 1)*f(nu - 1, z)._eval_expand_func()/z)
elif (nu + 1).is_extended_negative:
return (2*self._b*(nu + 1)*f(nu + 1, z)._eval_expand_func()/z -
self._a*self._b*f(nu + 2, z)._eval_expand_func())
return self
def _eval_simplify(self, **kwargs):
from sympy.simplify.simplify import besselsimp
return besselsimp(self)
class besselj(BesselBase):
r"""
Bessel function of the first kind.
Explanation
===========
The Bessel $J$ function of order $\nu$ is defined to be the function
satisfying Bessel's differential equation
.. math ::
z^2 \frac{\mathrm{d}^2 w}{\mathrm{d}z^2}
+ z \frac{\mathrm{d}w}{\mathrm{d}z} + (z^2 - \nu^2) w = 0,
with Laurent expansion
.. math ::
J_\nu(z) = z^\nu \left(\frac{1}{\Gamma(\nu + 1) 2^\nu} + O(z^2) \right),
if $\nu$ is not a negative integer. If $\nu=-n \in \mathbb{Z}_{<0}$
*is* a negative integer, then the definition is
.. math ::
J_{-n}(z) = (-1)^n J_n(z).
Examples
========
Create a Bessel function object:
>>> from sympy import besselj, jn
>>> from sympy.abc import z, n
>>> b = besselj(n, z)
Differentiate it:
>>> b.diff(z)
besselj(n - 1, z)/2 - besselj(n + 1, z)/2
Rewrite in terms of spherical Bessel functions:
>>> b.rewrite(jn)
sqrt(2)*sqrt(z)*jn(n - 1/2, z)/sqrt(pi)
Access the parameter and argument:
>>> b.order
n
>>> b.argument
z
See Also
========
bessely, besseli, besselk
References
==========
.. [1] Abramowitz, Milton; Stegun, Irene A., eds. (1965), "Chapter 9",
Handbook of Mathematical Functions with Formulas, Graphs, and
Mathematical Tables
.. [2] Luke, Y. L. (1969), The Special Functions and Their
Approximations, Volume 1
.. [3] https://en.wikipedia.org/wiki/Bessel_function
.. [4] http://functions.wolfram.com/Bessel-TypeFunctions/BesselJ/
"""
_a = S.One
_b = S.One
@classmethod
def eval(cls, nu, z):
if z.is_zero:
if nu.is_zero:
return S.One
elif (nu.is_integer and nu.is_zero is False) or re(nu).is_positive:
return S.Zero
elif re(nu).is_negative and not (nu.is_integer is True):
return S.ComplexInfinity
elif nu.is_imaginary:
return S.NaN
if z is S.Infinity or (z is S.NegativeInfinity):
return S.Zero
if z.could_extract_minus_sign():
return (z)**nu*(-z)**(-nu)*besselj(nu, -z)
if nu.is_integer:
if nu.could_extract_minus_sign():
return S.NegativeOne**(-nu)*besselj(-nu, z)
newz = z.extract_multiplicatively(I)
if newz: # NOTE we don't want to change the function if z==0
return I**(nu)*besseli(nu, newz)
# branch handling:
from sympy import unpolarify, exp
if nu.is_integer:
newz = unpolarify(z)
if newz != z:
return besselj(nu, newz)
else:
newz, n = z.extract_branch_factor()
if n != 0:
return exp(2*n*pi*nu*I)*besselj(nu, newz)
nnu = unpolarify(nu)
if nu != nnu:
return besselj(nnu, z)
def _eval_rewrite_as_besseli(self, nu, z, **kwargs):
from sympy import polar_lift, exp
return exp(I*pi*nu/2)*besseli(nu, polar_lift(-I)*z)
def _eval_rewrite_as_bessely(self, nu, z, **kwargs):
if nu.is_integer is False:
return csc(pi*nu)*bessely(-nu, z) - cot(pi*nu)*bessely(nu, z)
def _eval_rewrite_as_jn(self, nu, z, **kwargs):
return sqrt(2*z/pi)*jn(nu - S.Half, self.argument)
def _eval_is_extended_real(self):
nu, z = self.args
if nu.is_integer and z.is_extended_real:
return True
def _sage_(self):
import sage.all as sage
return sage.bessel_J(self.args[0]._sage_(), self.args[1]._sage_())
class bessely(BesselBase):
r"""
Bessel function of the second kind.
Explanation
===========
The Bessel $Y$ function of order $\nu$ is defined as
.. math ::
Y_\nu(z) = \lim_{\mu \to \nu} \frac{J_\mu(z) \cos(\pi \mu)
- J_{-\mu}(z)}{\sin(\pi \mu)},
where $J_\mu(z)$ is the Bessel function of the first kind.
It is a solution to Bessel's equation, and linearly independent from
$J_\nu$.
Examples
========
>>> from sympy import bessely, yn
>>> from sympy.abc import z, n
>>> b = bessely(n, z)
>>> b.diff(z)
bessely(n - 1, z)/2 - bessely(n + 1, z)/2
>>> b.rewrite(yn)
sqrt(2)*sqrt(z)*yn(n - 1/2, z)/sqrt(pi)
See Also
========
besselj, besseli, besselk
References
==========
.. [1] http://functions.wolfram.com/Bessel-TypeFunctions/BesselY/
"""
_a = S.One
_b = S.One
@classmethod
def eval(cls, nu, z):
if z.is_zero:
if nu.is_zero:
return S.NegativeInfinity
elif re(nu).is_zero is False:
return S.ComplexInfinity
elif re(nu).is_zero:
return S.NaN
if z is S.Infinity or z is S.NegativeInfinity:
return S.Zero
if nu.is_integer:
if nu.could_extract_minus_sign():
return S.NegativeOne**(-nu)*bessely(-nu, z)
def _eval_rewrite_as_besselj(self, nu, z, **kwargs):
if nu.is_integer is False:
return csc(pi*nu)*(cos(pi*nu)*besselj(nu, z) - besselj(-nu, z))
def _eval_rewrite_as_besseli(self, nu, z, **kwargs):
aj = self._eval_rewrite_as_besselj(*self.args)
if aj:
return aj.rewrite(besseli)
def _eval_rewrite_as_yn(self, nu, z, **kwargs):
return sqrt(2*z/pi) * yn(nu - S.Half, self.argument)
def _eval_is_extended_real(self):
nu, z = self.args
if nu.is_integer and z.is_positive:
return True
def _sage_(self):
import sage.all as sage
return sage.bessel_Y(self.args[0]._sage_(), self.args[1]._sage_())
class besseli(BesselBase):
r"""
Modified Bessel function of the first kind.
Explanation
===========
The Bessel $I$ function is a solution to the modified Bessel equation
.. math ::
        z^2 \frac{\mathrm{d}^2 w}{\mathrm{d}z^2}
        + z \frac{\mathrm{d}w}{\mathrm{d}z} - (z^2 + \nu^2) w = 0.
It can be defined as
.. math ::
I_\nu(z) = i^{-\nu} J_\nu(iz),
where $J_\nu(z)$ is the Bessel function of the first kind.
Examples
========
>>> from sympy import besseli
>>> from sympy.abc import z, n
>>> besseli(n, z).diff(z)
besseli(n - 1, z)/2 + besseli(n + 1, z)/2
See Also
========
besselj, bessely, besselk
References
==========
.. [1] http://functions.wolfram.com/Bessel-TypeFunctions/BesselI/
"""
_a = -S.One
_b = S.One
@classmethod
def eval(cls, nu, z):
if z.is_zero:
if nu.is_zero:
return S.One
elif (nu.is_integer and nu.is_zero is False) or re(nu).is_positive:
return S.Zero
elif re(nu).is_negative and not (nu.is_integer is True):
return S.ComplexInfinity
elif nu.is_imaginary:
return S.NaN
if im(z) is S.Infinity or im(z) is S.NegativeInfinity:
return S.Zero
if z.could_extract_minus_sign():
return (z)**nu*(-z)**(-nu)*besseli(nu, -z)
if nu.is_integer:
if nu.could_extract_minus_sign():
return besseli(-nu, z)
newz = z.extract_multiplicatively(I)
if newz: # NOTE we don't want to change the function if z==0
return I**(-nu)*besselj(nu, -newz)
# branch handling:
from sympy import unpolarify, exp
if nu.is_integer:
newz = unpolarify(z)
if newz != z:
return besseli(nu, newz)
else:
newz, n = z.extract_branch_factor()
if n != 0:
return exp(2*n*pi*nu*I)*besseli(nu, newz)
nnu = unpolarify(nu)
if nu != nnu:
return besseli(nnu, z)
def _eval_rewrite_as_besselj(self, nu, z, **kwargs):
from sympy import polar_lift, exp
return exp(-I*pi*nu/2)*besselj(nu, polar_lift(I)*z)
def _eval_rewrite_as_bessely(self, nu, z, **kwargs):
aj = self._eval_rewrite_as_besselj(*self.args)
if aj:
return aj.rewrite(bessely)
def _eval_rewrite_as_jn(self, nu, z, **kwargs):
return self._eval_rewrite_as_besselj(*self.args).rewrite(jn)
def _eval_is_extended_real(self):
nu, z = self.args
if nu.is_integer and z.is_extended_real:
return True
def _sage_(self):
import sage.all as sage
return sage.bessel_I(self.args[0]._sage_(), self.args[1]._sage_())
class besselk(BesselBase):
r"""
Modified Bessel function of the second kind.
Explanation
===========
The Bessel $K$ function of order $\nu$ is defined as
.. math ::
K_\nu(z) = \lim_{\mu \to \nu} \frac{\pi}{2}
\frac{I_{-\mu}(z) -I_\mu(z)}{\sin(\pi \mu)},
where $I_\mu(z)$ is the modified Bessel function of the first kind.
It is a solution of the modified Bessel equation, and linearly independent
    from $I_\nu$.
Examples
========
>>> from sympy import besselk
>>> from sympy.abc import z, n
>>> besselk(n, z).diff(z)
-besselk(n - 1, z)/2 - besselk(n + 1, z)/2
See Also
========
besselj, besseli, bessely
References
==========
.. [1] http://functions.wolfram.com/Bessel-TypeFunctions/BesselK/
"""
_a = S.One
_b = -S.One
@classmethod
def eval(cls, nu, z):
if z.is_zero:
if nu.is_zero:
return S.Infinity
elif re(nu).is_zero is False:
return S.ComplexInfinity
elif re(nu).is_zero:
return S.NaN
if im(z) is S.Infinity or im(z) is S.NegativeInfinity:
return S.Zero
if nu.is_integer:
if nu.could_extract_minus_sign():
return besselk(-nu, z)
def _eval_rewrite_as_besseli(self, nu, z, **kwargs):
if nu.is_integer is False:
return pi*csc(pi*nu)*(besseli(-nu, z) - besseli(nu, z))/2
def _eval_rewrite_as_besselj(self, nu, z, **kwargs):
ai = self._eval_rewrite_as_besseli(*self.args)
if ai:
return ai.rewrite(besselj)
def _eval_rewrite_as_bessely(self, nu, z, **kwargs):
aj = self._eval_rewrite_as_besselj(*self.args)
if aj:
return aj.rewrite(bessely)
def _eval_rewrite_as_yn(self, nu, z, **kwargs):
ay = self._eval_rewrite_as_bessely(*self.args)
if ay:
return ay.rewrite(yn)
def _eval_is_extended_real(self):
nu, z = self.args
if nu.is_integer and z.is_positive:
return True
def _sage_(self):
import sage.all as sage
return sage.bessel_K(self.args[0]._sage_(), self.args[1]._sage_())
class hankel1(BesselBase):
r"""
Hankel function of the first kind.
Explanation
===========
This function is defined as
.. math ::
        H_\nu^{(1)}(z) = J_\nu(z) + i Y_\nu(z),
where $J_\nu(z)$ is the Bessel function of the first kind, and
$Y_\nu(z)$ is the Bessel function of the second kind.
It is a solution to Bessel's equation.
Examples
========
>>> from sympy import hankel1
>>> from sympy.abc import z, n
>>> hankel1(n, z).diff(z)
hankel1(n - 1, z)/2 - hankel1(n + 1, z)/2
See Also
========
hankel2, besselj, bessely
References
==========
.. [1] http://functions.wolfram.com/Bessel-TypeFunctions/HankelH1/
"""
_a = S.One
_b = S.One
def _eval_conjugate(self):
z = self.argument
if z.is_extended_negative is False:
return hankel2(self.order.conjugate(), z.conjugate())
class hankel2(BesselBase):
r"""
Hankel function of the second kind.
Explanation
===========
This function is defined as
.. math ::
        H_\nu^{(2)}(z) = J_\nu(z) - i Y_\nu(z),
where $J_\nu(z)$ is the Bessel function of the first kind, and
$Y_\nu(z)$ is the Bessel function of the second kind.
It is a solution to Bessel's equation, and linearly independent from
$H_\nu^{(1)}$.
Examples
========
>>> from sympy import hankel2
>>> from sympy.abc import z, n
>>> hankel2(n, z).diff(z)
hankel2(n - 1, z)/2 - hankel2(n + 1, z)/2
See Also
========
hankel1, besselj, bessely
References
==========
.. [1] http://functions.wolfram.com/Bessel-TypeFunctions/HankelH2/
"""
_a = S.One
_b = S.One
def _eval_conjugate(self):
z = self.argument
if z.is_extended_negative is False:
return hankel1(self.order.conjugate(), z.conjugate())
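# Decorator for rewrite helpers that are only valid for integer orders: the
# wrapped method returns None (i.e. no rewrite) unless ``nu.is_integer``.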
def assume_integer_order(fn):
@wraps(fn)
def g(self, nu, z):
if nu.is_integer:
return fn(self, nu, z)
return g
class SphericalBesselBase(BesselBase):
"""
Base class for spherical Bessel functions.
These are thin wrappers around ordinary Bessel functions,
since spherical Bessel functions differ from the ordinary
ones just by a slight change in order.
To use this class, define the ``_rewrite()`` and ``_expand()`` methods.
"""
def _expand(self, **hints):
""" Expand self into a polynomial. Nu is guaranteed to be Integer. """
raise NotImplementedError('expansion')
def _rewrite(self):
""" Rewrite self in terms of ordinary Bessel functions. """
raise NotImplementedError('rewriting')
def _eval_expand_func(self, **hints):
if self.order.is_Integer:
return self._expand(**hints)
return self
def _eval_evalf(self, prec):
if self.order.is_Integer:
return self._rewrite()._eval_evalf(prec)
def fdiff(self, argindex=2):
if argindex != 2:
raise ArgumentIndexError(self, argindex)
return self.__class__(self.order - 1, self.argument) - \
self * (self.order + 1)/self.argument
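# Closed forms for spherical Bessel functions of integer order.  Here ``fn``
# is :func:`sympy.polys.orthopolys.spherical_bessel_fn` (imported elsewhere in
# this module), which yields the rational coefficients f_n(z).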
def _jn(n, z):
return fn(n, z)*sin(z) + (-1)**(n + 1)*fn(-n - 1, z)*cos(z)
def _yn(n, z):
# (-1)**(n + 1) * _jn(-n - 1, z)
return (-1)**(n + 1) * fn(-n - 1, z)*sin(z) - fn(n, z)*cos(z)
class jn(SphericalBesselBase):
r"""
Spherical Bessel function of the first kind.
Explanation
===========
This function is a solution to the spherical Bessel equation
.. math ::
z^2 \frac{\mathrm{d}^2 w}{\mathrm{d}z^2}
+ 2z \frac{\mathrm{d}w}{\mathrm{d}z} + (z^2 - \nu(\nu + 1)) w = 0.
It can be defined as
.. math ::
j_\nu(z) = \sqrt{\frac{\pi}{2z}} J_{\nu + \frac{1}{2}}(z),
where $J_\nu(z)$ is the Bessel function of the first kind.
The spherical Bessel functions of integral order are
calculated using the formula:
.. math:: j_n(z) = f_n(z) \sin{z} + (-1)^{n+1} f_{-n-1}(z) \cos{z},
where the coefficients $f_n(z)$ are available as
:func:`sympy.polys.orthopolys.spherical_bessel_fn`.
Examples
========
>>> from sympy import Symbol, jn, sin, cos, expand_func, besselj, bessely
>>> from sympy import simplify
>>> z = Symbol("z")
>>> nu = Symbol("nu", integer=True)
>>> print(expand_func(jn(0, z)))
sin(z)/z
>>> expand_func(jn(1, z)) == sin(z)/z**2 - cos(z)/z
True
>>> expand_func(jn(3, z))
(-6/z**2 + 15/z**4)*sin(z) + (1/z - 15/z**3)*cos(z)
>>> jn(nu, z).rewrite(besselj)
sqrt(2)*sqrt(pi)*sqrt(1/z)*besselj(nu + 1/2, z)/2
>>> jn(nu, z).rewrite(bessely)
(-1)**nu*sqrt(2)*sqrt(pi)*sqrt(1/z)*bessely(-nu - 1/2, z)/2
>>> jn(2, 5.2+0.3j).evalf(20)
0.099419756723640344491 - 0.054525080242173562897*I
See Also
========
besselj, bessely, besselk, yn
References
==========
.. [1] http://dlmf.nist.gov/10.47
"""
@classmethod
def eval(cls, nu, z):
if z.is_zero:
if nu.is_zero:
return S.One
elif nu.is_integer:
if nu.is_positive:
return S.Zero
else:
return S.ComplexInfinity
if z in (S.NegativeInfinity, S.Infinity):
return S.Zero
def _rewrite(self):
return self._eval_rewrite_as_besselj(self.order, self.argument)
def _eval_rewrite_as_besselj(self, nu, z, **kwargs):
return sqrt(pi/(2*z)) * besselj(nu + S.Half, z)
def _eval_rewrite_as_bessely(self, nu, z, **kwargs):
return (-1)**nu * sqrt(pi/(2*z)) * bessely(-nu - S.Half, z)
def _eval_rewrite_as_yn(self, nu, z, **kwargs):
return (-1)**(nu) * yn(-nu - 1, z)
def _expand(self, **hints):
return _jn(self.order, self.argument)
class yn(SphericalBesselBase):
r"""
Spherical Bessel function of the second kind.
Explanation
===========
This function is another solution to the spherical Bessel equation, and
linearly independent from $j_n$. It can be defined as
.. math ::
y_\nu(z) = \sqrt{\frac{\pi}{2z}} Y_{\nu + \frac{1}{2}}(z),
where $Y_\nu(z)$ is the Bessel function of the second kind.
For integral orders $n$, $y_n$ is calculated using the formula:
.. math:: y_n(z) = (-1)^{n+1} j_{-n-1}(z)
Examples
========
>>> from sympy import Symbol, yn, sin, cos, expand_func, besselj, bessely
>>> z = Symbol("z")
>>> nu = Symbol("nu", integer=True)
>>> print(expand_func(yn(0, z)))
-cos(z)/z
>>> expand_func(yn(1, z)) == -cos(z)/z**2-sin(z)/z
True
>>> yn(nu, z).rewrite(besselj)
(-1)**(nu + 1)*sqrt(2)*sqrt(pi)*sqrt(1/z)*besselj(-nu - 1/2, z)/2
>>> yn(nu, z).rewrite(bessely)
sqrt(2)*sqrt(pi)*sqrt(1/z)*bessely(nu + 1/2, z)/2
>>> yn(2, 5.2+0.3j).evalf(20)
0.18525034196069722536 + 0.014895573969924817587*I
See Also
========
besselj, bessely, besselk, jn
References
==========
.. [1] http://dlmf.nist.gov/10.47
"""
def _rewrite(self):
return self._eval_rewrite_as_bessely(self.order, self.argument)
@assume_integer_order
def _eval_rewrite_as_besselj(self, nu, z, **kwargs):
return (-1)**(nu+1) * sqrt(pi/(2*z)) * besselj(-nu - S.Half, z)
@assume_integer_order
def _eval_rewrite_as_bessely(self, nu, z, **kwargs):
return sqrt(pi/(2*z)) * bessely(nu + S.Half, z)
def _eval_rewrite_as_jn(self, nu, z, **kwargs):
return (-1)**(nu + 1) * jn(-nu - 1, z)
def _expand(self, **hints):
return _yn(self.order, self.argument)
class SphericalHankelBase(SphericalBesselBase):
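    """Base class for the spherical Hankel functions ``hn1`` and ``hn2``.

    Subclasses set ``_hankel_kind_sign`` to ``+1`` (first kind) or ``-1``
    (second kind), selecting between jn(nu, z) + I*yn(nu, z) and
    jn(nu, z) - I*yn(nu, z).
    """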
def _rewrite(self):
return self._eval_rewrite_as_besselj(self.order, self.argument)
@assume_integer_order
def _eval_rewrite_as_besselj(self, nu, z, **kwargs):
# jn +- I*yn
        # jn as besselj: sqrt(pi/(2*z)) * besselj(nu + S.Half, z)
# yn as besselj: (-1)**(nu+1) * sqrt(pi/(2*z)) * besselj(-nu - S.Half, z)
hks = self._hankel_kind_sign
return sqrt(pi/(2*z))*(besselj(nu + S.Half, z) +
hks*I*(-1)**(nu+1)*besselj(-nu - S.Half, z))
@assume_integer_order
def _eval_rewrite_as_bessely(self, nu, z, **kwargs):
# jn +- I*yn
# jn as bessely: (-1)**nu * sqrt(pi/(2*z)) * bessely(-nu - S.Half, z)
# yn as bessely: sqrt(pi/(2*z)) * bessely(nu + S.Half, z)
hks = self._hankel_kind_sign
return sqrt(pi/(2*z))*((-1)**nu*bessely(-nu - S.Half, z) +
hks*I*bessely(nu + S.Half, z))
def _eval_rewrite_as_yn(self, nu, z, **kwargs):
hks = self._hankel_kind_sign
return jn(nu, z).rewrite(yn) + hks*I*yn(nu, z)
def _eval_rewrite_as_jn(self, nu, z, **kwargs):
hks = self._hankel_kind_sign
return jn(nu, z) + hks*I*yn(nu, z).rewrite(jn)
def _eval_expand_func(self, **hints):
if self.order.is_Integer:
return self._expand(**hints)
else:
nu = self.order
z = self.argument
hks = self._hankel_kind_sign
return jn(nu, z) + hks*I*yn(nu, z)
def _expand(self, **hints):
n = self.order
z = self.argument
hks = self._hankel_kind_sign
# fully expanded version
# return ((fn(n, z) * sin(z) +
# (-1)**(n + 1) * fn(-n - 1, z) * cos(z)) + # jn
# (hks * I * (-1)**(n + 1) *
# (fn(-n - 1, z) * hk * I * sin(z) +
# (-1)**(-n) * fn(n, z) * I * cos(z))) # +-I*yn
# )
return (_jn(n, z) + hks*I*_yn(n, z)).expand()
class hn1(SphericalHankelBase):
r"""
Spherical Hankel function of the first kind.
Explanation
===========
This function is defined as
    .. math:: h_\nu^{(1)}(z) = j_\nu(z) + i y_\nu(z),
    where $j_\nu(z)$ and $y_\nu(z)$ are the spherical
    Bessel functions of the first and second kind.
    For integral orders $n$, $h_n^{(1)}$ is calculated using the formula:
    .. math:: h_n^{(1)}(z) = j_n(z) + i (-1)^{n+1} j_{-n-1}(z)
Examples
========
>>> from sympy import Symbol, hn1, hankel1, expand_func, yn, jn
>>> z = Symbol("z")
>>> nu = Symbol("nu", integer=True)
>>> print(expand_func(hn1(nu, z)))
jn(nu, z) + I*yn(nu, z)
>>> print(expand_func(hn1(0, z)))
sin(z)/z - I*cos(z)/z
>>> print(expand_func(hn1(1, z)))
-I*sin(z)/z - cos(z)/z + sin(z)/z**2 - I*cos(z)/z**2
>>> hn1(nu, z).rewrite(jn)
(-1)**(nu + 1)*I*jn(-nu - 1, z) + jn(nu, z)
>>> hn1(nu, z).rewrite(yn)
(-1)**nu*yn(-nu - 1, z) + I*yn(nu, z)
>>> hn1(nu, z).rewrite(hankel1)
sqrt(2)*sqrt(pi)*sqrt(1/z)*hankel1(nu, z)/2
See Also
========
hn2, jn, yn, hankel1, hankel2
References
==========
.. [1] http://dlmf.nist.gov/10.47
"""
_hankel_kind_sign = S.One
@assume_integer_order
def _eval_rewrite_as_hankel1(self, nu, z, **kwargs):
return sqrt(pi/(2*z))*hankel1(nu, z)
class hn2(SphericalHankelBase):
r"""
Spherical Hankel function of the second kind.
Explanation
===========
This function is defined as
    .. math:: h_\nu^{(2)}(z) = j_\nu(z) - i y_\nu(z),
    where $j_\nu(z)$ and $y_\nu(z)$ are the spherical
    Bessel functions of the first and second kind.
    For integral orders $n$, $h_n^{(2)}$ is calculated using the formula:
    .. math:: h_n^{(2)}(z) = j_n(z) - i (-1)^{n+1} j_{-n-1}(z)
Examples
========
>>> from sympy import Symbol, hn2, hankel2, expand_func, jn, yn
>>> z = Symbol("z")
>>> nu = Symbol("nu", integer=True)
>>> print(expand_func(hn2(nu, z)))
jn(nu, z) - I*yn(nu, z)
>>> print(expand_func(hn2(0, z)))
sin(z)/z + I*cos(z)/z
>>> print(expand_func(hn2(1, z)))
I*sin(z)/z - cos(z)/z + sin(z)/z**2 + I*cos(z)/z**2
>>> hn2(nu, z).rewrite(hankel2)
sqrt(2)*sqrt(pi)*sqrt(1/z)*hankel2(nu, z)/2
>>> hn2(nu, z).rewrite(jn)
-(-1)**(nu + 1)*I*jn(-nu - 1, z) + jn(nu, z)
>>> hn2(nu, z).rewrite(yn)
(-1)**nu*yn(-nu - 1, z) - I*yn(nu, z)
See Also
========
hn1, jn, yn, hankel1, hankel2
References
==========
.. [1] http://dlmf.nist.gov/10.47
"""
_hankel_kind_sign = -S.One
@assume_integer_order
def _eval_rewrite_as_hankel2(self, nu, z, **kwargs):
return sqrt(pi/(2*z))*hankel2(nu, z)
def jn_zeros(n, k, method="sympy", dps=15):
"""
Zeros of the spherical Bessel function of the first kind.
Explanation
===========
This returns an array of zeros of $jn$ up to the $k$-th zero.
    * method = "sympy": uses `mpmath.besseljzero
      <http://mpmath.org/doc/current/functions/bessel.html#mpmath.besseljzero>`_
    * method = "scipy": uses SciPy's
      `sph_jn <http://docs.scipy.org/doc/scipy/reference/generated/scipy.special.jn_zeros.html>`_
      and
      `newton <http://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.newton.html>`_
      to find all roots, which is faster than computing the zeros with a
      general numerical solver, but requires SciPy and only works with
      low-precision floating point numbers. (The function used with
      method="sympy" is a recent addition to mpmath; before that a general
      solver was used.)
Examples
========
>>> from sympy import jn_zeros
>>> jn_zeros(2, 4, dps=5)
[5.7635, 9.095, 12.323, 15.515]
See Also
========
jn, yn, besselj, besselk, bessely
"""
from math import pi
if method == "sympy":
from mpmath import besseljzero
from mpmath.libmp.libmpf import dps_to_prec
from sympy import Expr
prec = dps_to_prec(dps)
return [Expr._from_mpmath(besseljzero(S(n + 0.5)._to_mpmath(prec),
int(l)), prec)
for l in range(1, k + 1)]
elif method == "scipy":
from scipy.optimize import newton
try:
from scipy.special import spherical_jn
f = lambda x: spherical_jn(n, x)
except ImportError:
from scipy.special import sph_jn
f = lambda x: sph_jn(n, x)[0][-1]
else:
raise NotImplementedError("Unknown method.")
def solver(f, x):
if method == "scipy":
root = newton(f, x)
else:
raise NotImplementedError("Unknown method.")
return root
# we need to approximate the position of the first root:
root = n + pi
# determine the first root exactly:
root = solver(f, root)
roots = [root]
for i in range(k - 1):
# estimate the position of the next root using the last root + pi:
root = solver(f, root + pi)
roots.append(root)
return roots
class AiryBase(Function):
"""
Abstract base class for Airy functions.
This class is meant to reduce code duplication.
"""
def _eval_conjugate(self):
return self.func(self.args[0].conjugate())
def _eval_is_extended_real(self):
return self.args[0].is_extended_real
def as_real_imag(self, deep=True, **hints):
z = self.args[0]
zc = z.conjugate()
f = self.func
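        # These use the mirror symmetry f(conjugate(z)) == conjugate(f(z)),
        # which holds for the Airy functions, so u and v are the real and
        # imaginary parts of f(z).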
u = (f(z)+f(zc))/2
v = I*(f(zc)-f(z))/2
return u, v
def _eval_expand_complex(self, deep=True, **hints):
re_part, im_part = self.as_real_imag(deep=deep, **hints)
return re_part + im_part*S.ImaginaryUnit
class airyai(AiryBase):
r"""
The Airy function $\operatorname{Ai}$ of the first kind.
Explanation
===========
The Airy function $\operatorname{Ai}(z)$ is defined to be the function
satisfying Airy's differential equation
.. math::
\frac{\mathrm{d}^2 w(z)}{\mathrm{d}z^2} - z w(z) = 0.
Equivalently, for real $z$
.. math::
\operatorname{Ai}(z) := \frac{1}{\pi}
\int_0^\infty \cos\left(\frac{t^3}{3} + z t\right) \mathrm{d}t.
Examples
========
Create an Airy function object:
>>> from sympy import airyai
>>> from sympy.abc import z
>>> airyai(z)
airyai(z)
Several special values are known:
>>> airyai(0)
3**(1/3)/(3*gamma(2/3))
>>> from sympy import oo
>>> airyai(oo)
0
>>> airyai(-oo)
0
The Airy function obeys the mirror symmetry:
>>> from sympy import conjugate
>>> conjugate(airyai(z))
airyai(conjugate(z))
Differentiation with respect to $z$ is supported:
>>> from sympy import diff
>>> diff(airyai(z), z)
airyaiprime(z)
>>> diff(airyai(z), z, 2)
z*airyai(z)
Series expansion is also supported:
>>> from sympy import series
>>> series(airyai(z), z, 0, 3)
3**(5/6)*gamma(1/3)/(6*pi) - 3**(1/6)*z*gamma(2/3)/(2*pi) + O(z**3)
We can numerically evaluate the Airy function to arbitrary precision
on the whole complex plane:
>>> airyai(-2).evalf(50)
0.22740742820168557599192443603787379946077222541710
Rewrite $\operatorname{Ai}(z)$ in terms of hypergeometric functions:
>>> from sympy import hyper
>>> airyai(z).rewrite(hyper)
-3**(2/3)*z*hyper((), (4/3,), z**3/9)/(3*gamma(1/3)) + 3**(1/3)*hyper((), (2/3,), z**3/9)/(3*gamma(2/3))
See Also
========
airybi: Airy function of the second kind.
airyaiprime: Derivative of the Airy function of the first kind.
airybiprime: Derivative of the Airy function of the second kind.
References
==========
.. [1] https://en.wikipedia.org/wiki/Airy_function
.. [2] http://dlmf.nist.gov/9
.. [3] http://www.encyclopediaofmath.org/index.php/Airy_functions
.. [4] http://mathworld.wolfram.com/AiryFunctions.html
"""
nargs = 1
unbranched = True
@classmethod
def eval(cls, arg):
        if arg.is_Number:
            if arg is S.NaN:
                return S.NaN
            elif arg is S.Infinity:
                return S.Zero
            elif arg is S.NegativeInfinity:
                return S.Zero
        # The ``is_zero`` check below also covers symbolic zeros, so the
        # Number branch does not need its own zero case.
        if arg.is_zero:
            return S.One / (3**Rational(2, 3) * gamma(Rational(2, 3)))
def fdiff(self, argindex=1):
if argindex == 1:
return airyaiprime(self.args[0])
else:
raise ArgumentIndexError(self, argindex)
@staticmethod
@cacheit
def taylor_term(n, x, *previous_terms):
if n < 0:
return S.Zero
else:
x = sympify(x)
if len(previous_terms) > 1:
p = previous_terms[-1]
return ((3**Rational(1, 3)*x)**(-n)*(3**Rational(1, 3)*x)**(n + 1)*sin(pi*(n*Rational(2, 3) + Rational(4, 3)))*factorial(n) *
gamma(n/3 + Rational(2, 3))/(sin(pi*(n*Rational(2, 3) + Rational(2, 3)))*factorial(n + 1)*gamma(n/3 + Rational(1, 3))) * p)
else:
return (S.One/(3**Rational(2, 3)*pi) * gamma((n+S.One)/S(3)) * sin(2*pi*(n+S.One)/S(3)) /
factorial(n) * (root(3, 3)*x)**n)
def _eval_rewrite_as_besselj(self, z, **kwargs):
ot = Rational(1, 3)
tt = Rational(2, 3)
a = Pow(-z, Rational(3, 2))
if re(z).is_negative:
return ot*sqrt(-z) * (besselj(-ot, tt*a) + besselj(ot, tt*a))
def _eval_rewrite_as_besseli(self, z, **kwargs):
ot = Rational(1, 3)
tt = Rational(2, 3)
a = Pow(z, Rational(3, 2))
if re(z).is_positive:
return ot*sqrt(z) * (besseli(-ot, tt*a) - besseli(ot, tt*a))
else:
return ot*(Pow(a, ot)*besseli(-ot, tt*a) - z*Pow(a, -ot)*besseli(ot, tt*a))
def _eval_rewrite_as_hyper(self, z, **kwargs):
pf1 = S.One / (3**Rational(2, 3)*gamma(Rational(2, 3)))
pf2 = z / (root(3, 3)*gamma(Rational(1, 3)))
return pf1 * hyper([], [Rational(2, 3)], z**3/9) - pf2 * hyper([], [Rational(4, 3)], z**3/9)
def _eval_expand_func(self, **hints):
arg = self.args[0]
symbs = arg.free_symbols
if len(symbs) == 1:
z = symbs.pop()
c = Wild("c", exclude=[z])
d = Wild("d", exclude=[z])
m = Wild("m", exclude=[z])
n = Wild("n", exclude=[z])
M = arg.match(c*(d*z**n)**m)
if M is not None:
m = M[m]
# The transformation is given by 03.05.16.0001.01
# http://functions.wolfram.com/Bessel-TypeFunctions/AiryAi/16/01/01/0001/
if (3*m).is_integer:
c = M[c]
d = M[d]
n = M[n]
pf = (d * z**n)**m / (d**m * z**(m*n))
newarg = c * d**m * z**(m*n)
return S.Half * ((pf + S.One)*airyai(newarg) - (pf - S.One)/sqrt(3)*airybi(newarg))
class airybi(AiryBase):
r"""
The Airy function $\operatorname{Bi}$ of the second kind.
Explanation
===========
The Airy function $\operatorname{Bi}(z)$ is defined to be the function
satisfying Airy's differential equation
.. math::
\frac{\mathrm{d}^2 w(z)}{\mathrm{d}z^2} - z w(z) = 0.
Equivalently, for real $z$
.. math::
\operatorname{Bi}(z) := \frac{1}{\pi}
\int_0^\infty
\exp\left(-\frac{t^3}{3} + z t\right)
+ \sin\left(\frac{t^3}{3} + z t\right) \mathrm{d}t.
Examples
========
Create an Airy function object:
>>> from sympy import airybi
>>> from sympy.abc import z
>>> airybi(z)
airybi(z)
Several special values are known:
>>> airybi(0)
3**(5/6)/(3*gamma(2/3))
>>> from sympy import oo
>>> airybi(oo)
oo
>>> airybi(-oo)
0
The Airy function obeys the mirror symmetry:
>>> from sympy import conjugate
>>> conjugate(airybi(z))
airybi(conjugate(z))
Differentiation with respect to $z$ is supported:
>>> from sympy import diff
>>> diff(airybi(z), z)
airybiprime(z)
>>> diff(airybi(z), z, 2)
z*airybi(z)
Series expansion is also supported:
>>> from sympy import series
>>> series(airybi(z), z, 0, 3)
3**(1/3)*gamma(1/3)/(2*pi) + 3**(2/3)*z*gamma(2/3)/(2*pi) + O(z**3)
We can numerically evaluate the Airy function to arbitrary precision
on the whole complex plane:
>>> airybi(-2).evalf(50)
-0.41230258795639848808323405461146104203453483447240
Rewrite $\operatorname{Bi}(z)$ in terms of hypergeometric functions:
>>> from sympy import hyper
>>> airybi(z).rewrite(hyper)
3**(1/6)*z*hyper((), (4/3,), z**3/9)/gamma(1/3) + 3**(5/6)*hyper((), (2/3,), z**3/9)/(3*gamma(2/3))
See Also
========
airyai: Airy function of the first kind.
airyaiprime: Derivative of the Airy function of the first kind.
airybiprime: Derivative of the Airy function of the second kind.
References
==========
.. [1] https://en.wikipedia.org/wiki/Airy_function
.. [2] http://dlmf.nist.gov/9
.. [3] http://www.encyclopediaofmath.org/index.php/Airy_functions
.. [4] http://mathworld.wolfram.com/AiryFunctions.html
"""
nargs = 1
unbranched = True
@classmethod
def eval(cls, arg):
        if arg.is_Number:
            if arg is S.NaN:
                return S.NaN
            elif arg is S.Infinity:
                return S.Infinity
            elif arg is S.NegativeInfinity:
                return S.Zero
        # The ``is_zero`` check below also covers symbolic zeros, so the
        # Number branch does not need its own zero case.
        if arg.is_zero:
            return S.One / (3**Rational(1, 6) * gamma(Rational(2, 3)))
def fdiff(self, argindex=1):
if argindex == 1:
return airybiprime(self.args[0])
else:
raise ArgumentIndexError(self, argindex)
@staticmethod
@cacheit
def taylor_term(n, x, *previous_terms):
if n < 0:
return S.Zero
else:
x = sympify(x)
if len(previous_terms) > 1:
p = previous_terms[-1]
return (3**Rational(1, 3)*x * Abs(sin(2*pi*(n + S.One)/S(3))) * factorial((n - S.One)/S(3)) /
((n + S.One) * Abs(cos(2*pi*(n + S.Half)/S(3))) * factorial((n - 2)/S(3))) * p)
else:
return (S.One/(root(3, 6)*pi) * gamma((n + S.One)/S(3)) * Abs(sin(2*pi*(n + S.One)/S(3))) /
factorial(n) * (root(3, 3)*x)**n)
def _eval_rewrite_as_besselj(self, z, **kwargs):
ot = Rational(1, 3)
tt = Rational(2, 3)
a = Pow(-z, Rational(3, 2))
if re(z).is_negative:
return sqrt(-z/3) * (besselj(-ot, tt*a) - besselj(ot, tt*a))
def _eval_rewrite_as_besseli(self, z, **kwargs):
ot = Rational(1, 3)
tt = Rational(2, 3)
a = Pow(z, Rational(3, 2))
if re(z).is_positive:
return sqrt(z)/sqrt(3) * (besseli(-ot, tt*a) + besseli(ot, tt*a))
else:
b = Pow(a, ot)
c = Pow(a, -ot)
return sqrt(ot)*(b*besseli(-ot, tt*a) + z*c*besseli(ot, tt*a))
def _eval_rewrite_as_hyper(self, z, **kwargs):
pf1 = S.One / (root(3, 6)*gamma(Rational(2, 3)))
pf2 = z*root(3, 6) / gamma(Rational(1, 3))
return pf1 * hyper([], [Rational(2, 3)], z**3/9) + pf2 * hyper([], [Rational(4, 3)], z**3/9)
def _eval_expand_func(self, **hints):
arg = self.args[0]
symbs = arg.free_symbols
if len(symbs) == 1:
z = symbs.pop()
c = Wild("c", exclude=[z])
d = Wild("d", exclude=[z])
m = Wild("m", exclude=[z])
n = Wild("n", exclude=[z])
M = arg.match(c*(d*z**n)**m)
if M is not None:
m = M[m]
# The transformation is given by 03.06.16.0001.01
# http://functions.wolfram.com/Bessel-TypeFunctions/AiryBi/16/01/01/0001/
if (3*m).is_integer:
c = M[c]
d = M[d]
n = M[n]
pf = (d * z**n)**m / (d**m * z**(m*n))
newarg = c * d**m * z**(m*n)
return S.Half * (sqrt(3)*(S.One - pf)*airyai(newarg) + (S.One + pf)*airybi(newarg))
class airyaiprime(AiryBase):
r"""
The derivative $\operatorname{Ai}^\prime$ of the Airy function of the first
kind.
Explanation
===========
The Airy function $\operatorname{Ai}^\prime(z)$ is defined to be the
function
.. math::
\operatorname{Ai}^\prime(z) := \frac{\mathrm{d} \operatorname{Ai}(z)}{\mathrm{d} z}.
Examples
========
Create an Airy function object:
>>> from sympy import airyaiprime
>>> from sympy.abc import z
>>> airyaiprime(z)
airyaiprime(z)
Several special values are known:
>>> airyaiprime(0)
-3**(2/3)/(3*gamma(1/3))
>>> from sympy import oo
>>> airyaiprime(oo)
0
The Airy function obeys the mirror symmetry:
>>> from sympy import conjugate
>>> conjugate(airyaiprime(z))
airyaiprime(conjugate(z))
Differentiation with respect to $z$ is supported:
>>> from sympy import diff
>>> diff(airyaiprime(z), z)
z*airyai(z)
>>> diff(airyaiprime(z), z, 2)
z*airyaiprime(z) + airyai(z)
Series expansion is also supported:
>>> from sympy import series
>>> series(airyaiprime(z), z, 0, 3)
-3**(2/3)/(3*gamma(1/3)) + 3**(1/3)*z**2/(6*gamma(2/3)) + O(z**3)
We can numerically evaluate the Airy function to arbitrary precision
on the whole complex plane:
>>> airyaiprime(-2).evalf(50)
0.61825902074169104140626429133247528291577794512415
Rewrite $\operatorname{Ai}^\prime(z)$ in terms of hypergeometric functions:
>>> from sympy import hyper
>>> airyaiprime(z).rewrite(hyper)
3**(1/3)*z**2*hyper((), (5/3,), z**3/9)/(6*gamma(2/3)) - 3**(2/3)*hyper((), (1/3,), z**3/9)/(3*gamma(1/3))
See Also
========
airyai: Airy function of the first kind.
airybi: Airy function of the second kind.
airybiprime: Derivative of the Airy function of the second kind.
References
==========
.. [1] https://en.wikipedia.org/wiki/Airy_function
.. [2] http://dlmf.nist.gov/9
.. [3] http://www.encyclopediaofmath.org/index.php/Airy_functions
.. [4] http://mathworld.wolfram.com/AiryFunctions.html
"""
nargs = 1
unbranched = True
@classmethod
def eval(cls, arg):
if arg.is_Number:
if arg is S.NaN:
return S.NaN
elif arg is S.Infinity:
return S.Zero
if arg.is_zero:
return S.NegativeOne / (3**Rational(1, 3) * gamma(Rational(1, 3)))
def fdiff(self, argindex=1):
if argindex == 1:
return self.args[0]*airyai(self.args[0])
else:
raise ArgumentIndexError(self, argindex)
def _eval_evalf(self, prec):
from mpmath import mp, workprec
from sympy import Expr
z = self.args[0]._to_mpmath(prec)
with workprec(prec):
res = mp.airyai(z, derivative=1)
return Expr._from_mpmath(res, prec)
def _eval_rewrite_as_besselj(self, z, **kwargs):
tt = Rational(2, 3)
a = Pow(-z, Rational(3, 2))
if re(z).is_negative:
return z/3 * (besselj(-tt, tt*a) - besselj(tt, tt*a))
def _eval_rewrite_as_besseli(self, z, **kwargs):
ot = Rational(1, 3)
tt = Rational(2, 3)
a = tt * Pow(z, Rational(3, 2))
if re(z).is_positive:
return z/3 * (besseli(tt, a) - besseli(-tt, a))
else:
a = Pow(z, Rational(3, 2))
b = Pow(a, tt)
c = Pow(a, -tt)
return ot * (z**2*c*besseli(tt, tt*a) - b*besseli(-ot, tt*a))
def _eval_rewrite_as_hyper(self, z, **kwargs):
pf1 = z**2 / (2*3**Rational(2, 3)*gamma(Rational(2, 3)))
pf2 = 1 / (root(3, 3)*gamma(Rational(1, 3)))
return pf1 * hyper([], [Rational(5, 3)], z**3/9) - pf2 * hyper([], [Rational(1, 3)], z**3/9)
def _eval_expand_func(self, **hints):
arg = self.args[0]
symbs = arg.free_symbols
if len(symbs) == 1:
z = symbs.pop()
c = Wild("c", exclude=[z])
d = Wild("d", exclude=[z])
m = Wild("m", exclude=[z])
n = Wild("n", exclude=[z])
M = arg.match(c*(d*z**n)**m)
if M is not None:
m = M[m]
# The transformation is in principle
# given by 03.07.16.0001.01 but note
# that there is an error in this formula.
# http://functions.wolfram.com/Bessel-TypeFunctions/AiryAiPrime/16/01/01/0001/
if (3*m).is_integer:
c = M[c]
d = M[d]
n = M[n]
pf = (d**m * z**(n*m)) / (d * z**n)**m
newarg = c * d**m * z**(n*m)
return S.Half * ((pf + S.One)*airyaiprime(newarg) + (pf - S.One)/sqrt(3)*airybiprime(newarg))
class airybiprime(AiryBase):
r"""
    The derivative $\operatorname{Bi}^\prime$ of the Airy function of the
    second kind.
Explanation
===========
The Airy function $\operatorname{Bi}^\prime(z)$ is defined to be the
function
.. math::
\operatorname{Bi}^\prime(z) := \frac{\mathrm{d} \operatorname{Bi}(z)}{\mathrm{d} z}.
Examples
========
Create an Airy function object:
>>> from sympy import airybiprime
>>> from sympy.abc import z
>>> airybiprime(z)
airybiprime(z)
Several special values are known:
>>> airybiprime(0)
3**(1/6)/gamma(1/3)
>>> from sympy import oo
>>> airybiprime(oo)
oo
>>> airybiprime(-oo)
0
The Airy function obeys the mirror symmetry:
>>> from sympy import conjugate
>>> conjugate(airybiprime(z))
airybiprime(conjugate(z))
Differentiation with respect to $z$ is supported:
>>> from sympy import diff
>>> diff(airybiprime(z), z)
z*airybi(z)
>>> diff(airybiprime(z), z, 2)
z*airybiprime(z) + airybi(z)
Series expansion is also supported:
>>> from sympy import series
>>> series(airybiprime(z), z, 0, 3)
3**(1/6)/gamma(1/3) + 3**(5/6)*z**2/(6*gamma(2/3)) + O(z**3)
We can numerically evaluate the Airy function to arbitrary precision
on the whole complex plane:
>>> airybiprime(-2).evalf(50)
0.27879516692116952268509756941098324140300059345163
Rewrite $\operatorname{Bi}^\prime(z)$ in terms of hypergeometric functions:
>>> from sympy import hyper
>>> airybiprime(z).rewrite(hyper)
3**(5/6)*z**2*hyper((), (5/3,), z**3/9)/(6*gamma(2/3)) + 3**(1/6)*hyper((), (1/3,), z**3/9)/gamma(1/3)
See Also
========
airyai: Airy function of the first kind.
airybi: Airy function of the second kind.
airyaiprime: Derivative of the Airy function of the first kind.
References
==========
.. [1] https://en.wikipedia.org/wiki/Airy_function
.. [2] http://dlmf.nist.gov/9
.. [3] http://www.encyclopediaofmath.org/index.php/Airy_functions
.. [4] http://mathworld.wolfram.com/AiryFunctions.html
"""
nargs = 1
unbranched = True
@classmethod
def eval(cls, arg):
        if arg.is_Number:
            if arg is S.NaN:
                return S.NaN
            elif arg is S.Infinity:
                return S.Infinity
            elif arg is S.NegativeInfinity:
                return S.Zero
        # The ``is_zero`` check below also covers symbolic zeros, so the
        # Number branch does not need its own zero case.
        if arg.is_zero:
            return 3**Rational(1, 6) / gamma(Rational(1, 3))
def fdiff(self, argindex=1):
if argindex == 1:
return self.args[0]*airybi(self.args[0])
else:
raise ArgumentIndexError(self, argindex)
def _eval_evalf(self, prec):
from mpmath import mp, workprec
from sympy import Expr
z = self.args[0]._to_mpmath(prec)
with workprec(prec):
res = mp.airybi(z, derivative=1)
return Expr._from_mpmath(res, prec)
def _eval_rewrite_as_besselj(self, z, **kwargs):
tt = Rational(2, 3)
a = tt * Pow(-z, Rational(3, 2))
if re(z).is_negative:
return -z/sqrt(3) * (besselj(-tt, a) + besselj(tt, a))
def _eval_rewrite_as_besseli(self, z, **kwargs):
ot = Rational(1, 3)
tt = Rational(2, 3)
a = tt * Pow(z, Rational(3, 2))
if re(z).is_positive:
return z/sqrt(3) * (besseli(-tt, a) + besseli(tt, a))
else:
a = Pow(z, Rational(3, 2))
b = Pow(a, tt)
c = Pow(a, -tt)
return sqrt(ot) * (b*besseli(-tt, tt*a) + z**2*c*besseli(tt, tt*a))
def _eval_rewrite_as_hyper(self, z, **kwargs):
pf1 = z**2 / (2*root(3, 6)*gamma(Rational(2, 3)))
pf2 = root(3, 6) / gamma(Rational(1, 3))
return pf1 * hyper([], [Rational(5, 3)], z**3/9) + pf2 * hyper([], [Rational(1, 3)], z**3/9)
def _eval_expand_func(self, **hints):
arg = self.args[0]
symbs = arg.free_symbols
if len(symbs) == 1:
z = symbs.pop()
c = Wild("c", exclude=[z])
d = Wild("d", exclude=[z])
m = Wild("m", exclude=[z])
n = Wild("n", exclude=[z])
M = arg.match(c*(d*z**n)**m)
if M is not None:
m = M[m]
# The transformation is in principle
# given by 03.08.16.0001.01 but note
# that there is an error in this formula.
# http://functions.wolfram.com/Bessel-TypeFunctions/AiryBiPrime/16/01/01/0001/
if (3*m).is_integer:
c = M[c]
d = M[d]
n = M[n]
pf = (d**m * z**(n*m)) / (d * z**n)**m
newarg = c * d**m * z**(n*m)
return S.Half * (sqrt(3)*(pf - S.One)*airyaiprime(newarg) + (pf + S.One)*airybiprime(newarg))
class marcumq(Function):
r"""
The Marcum Q-function.
Explanation
===========
The Marcum Q-function is defined by the meromorphic continuation of
.. math::
Q_m(a, b) = a^{- m + 1} \int_{b}^{\infty} x^{m} e^{- \frac{a^{2}}{2} - \frac{x^{2}}{2}} I_{m - 1}\left(a x\right)\, dx
Examples
========
>>> from sympy import marcumq
>>> from sympy.abc import m, a, b, x
>>> marcumq(m, a, b)
marcumq(m, a, b)
Special values:
>>> marcumq(m, 0, b)
uppergamma(m, b**2/2)/gamma(m)
>>> marcumq(0, 0, 0)
0
>>> marcumq(0, a, 0)
1 - exp(-a**2/2)
>>> marcumq(1, a, a)
1/2 + exp(-a**2)*besseli(0, a**2)/2
>>> marcumq(2, a, a)
1/2 + exp(-a**2)*besseli(0, a**2)/2 + exp(-a**2)*besseli(1, a**2)
Differentiation with respect to $a$ and $b$ is supported:
>>> from sympy import diff
>>> diff(marcumq(m, a, b), a)
a*(-marcumq(m, a, b) + marcumq(m + 1, a, b))
>>> diff(marcumq(m, a, b), b)
-a**(1 - m)*b**m*exp(-a**2/2 - b**2/2)*besseli(m - 1, a*b)
References
==========
.. [1] https://en.wikipedia.org/wiki/Marcum_Q-function
.. [2] http://mathworld.wolfram.com/MarcumQ-Function.html
"""
@classmethod
def eval(cls, m, a, b):
from sympy import exp, uppergamma
        # ``is_zero`` subsumes the identity check ``is S.Zero``, so each zero
        # case is handled once.
        if a.is_zero:
            if m.is_zero and b.is_zero:
                return S.Zero
            return uppergamma(m, b**2 * S.Half) / gamma(m)
        if m.is_zero and b.is_zero:
            return 1 - 1 / exp(a**2 * S.Half)
        if a == b:
            if m is S.One:
                return (1 + exp(-a**2) * besseli(0, a**2)) * S.Half
            if m == 2:
                return (S.Half + S.Half * exp(-a**2) * besseli(0, a**2) +
                        exp(-a**2) * besseli(1, a**2))
def fdiff(self, argindex=2):
from sympy import exp
m, a, b = self.args
if argindex == 2:
return a * (-marcumq(m, a, b) + marcumq(1+m, a, b))
elif argindex == 3:
return (-b**m / a**(m-1)) * exp(-(a**2 + b**2)/2) * besseli(m-1, a*b)
else:
raise ArgumentIndexError(self, argindex)
def _eval_rewrite_as_Integral(self, m, a, b, **kwargs):
from sympy import Integral, exp, Dummy, oo
x = kwargs.get('x', Dummy('x'))
return a ** (1 - m) * \
Integral(x**m * exp(-(x**2 + a**2)/2) * besseli(m-1, a*x), [x, b, oo])
def _eval_rewrite_as_Sum(self, m, a, b, **kwargs):
from sympy import Sum, exp, Dummy, oo
k = kwargs.get('k', Dummy('k'))
return exp(-(a**2 + b**2) / 2) * Sum((a/b)**k * besseli(k, a*b), [k, 1-m, oo])
def _eval_rewrite_as_besseli(self, m, a, b, **kwargs):
if a == b:
from sympy import exp
if m == 1:
return (1 + exp(-a**2) * besseli(0, a**2)) / 2
if m.is_Integer and m >= 2:
s = sum([besseli(i, a**2) for i in range(1, m)])
return S.Half + exp(-a**2) * besseli(0, a**2) / 2 + exp(-a**2) * s
def _eval_is_zero(self):
if all(arg.is_zero for arg in self.args):
return True
avg_line_length: 29.1939 | max_line_length: 147 | alphanum_fraction: 0.545877

hexsha: bd6f97978454f1b33f650241ebb330d28b4471d2 | size: 4,800 | ext: py | lang: Python
max_stars_repo_path: tests/integration/docusaurus/connecting_to_your_data/cloud/s3/spark/inferred_and_runtime_python_example.py
max_stars_repo_name: victorcouste/great_expectations | max_stars_repo_head_hexsha: 9ee46d83feb87e13c769e2ae35b899b3f18d73a4
max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: 6,451 | max_stars_repo_stars_event_min_datetime: 2017-09-11T16:32:53.000Z | max_stars_repo_stars_event_max_datetime: 2022-03-31T23:27:49.000Z
max_issues_repo_path: tests/integration/docusaurus/connecting_to_your_data/cloud/s3/spark/inferred_and_runtime_python_example.py
max_issues_repo_name: victorcouste/great_expectations | max_issues_repo_head_hexsha: 9ee46d83feb87e13c769e2ae35b899b3f18d73a4
max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: 3,892 | max_issues_repo_issues_event_min_datetime: 2017-09-08T18:57:50.000Z | max_issues_repo_issues_event_max_datetime: 2022-03-31T23:15:20.000Z
max_forks_repo_path: tests/integration/docusaurus/connecting_to_your_data/cloud/s3/spark/inferred_and_runtime_python_example.py
max_forks_repo_name: victorcouste/great_expectations | max_forks_repo_head_hexsha: 9ee46d83feb87e13c769e2ae35b899b3f18d73a4
max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: 1,023 | max_forks_repo_forks_event_min_datetime: 2017-09-08T15:22:05.000Z | max_forks_repo_forks_event_max_datetime: 2022-03-31T21:17:08.000Z
content:
from typing import List
from ruamel import yaml
import great_expectations as ge
from great_expectations.core.batch import Batch, BatchRequest, RuntimeBatchRequest
from great_expectations.data_context import BaseDataContext
from great_expectations.data_context.types.base import (
DataContextConfig,
InMemoryStoreBackendDefaults,
)
# NOTE: InMemoryStoreBackendDefaults SHOULD NOT BE USED in normal settings. You
# may experience data loss as it persists nothing. It is used here for testing.
# Please refer to docs to learn how to instantiate your DataContext.
store_backend_defaults = InMemoryStoreBackendDefaults()
data_context_config = DataContextConfig(
store_backend_defaults=store_backend_defaults,
checkpoint_store_name=store_backend_defaults.checkpoint_store_name,
)
context = BaseDataContext(project_config=data_context_config)
datasource_config = {
"name": "my_s3_datasource",
"class_name": "Datasource",
"execution_engine": {"class_name": "SparkDFExecutionEngine"},
"data_connectors": {
"default_runtime_data_connector_name": {
"class_name": "RuntimeDataConnector",
"batch_identifiers": ["default_identifier_name"],
},
"default_inferred_data_connector_name": {
"class_name": "InferredAssetS3DataConnector",
"bucket": "<YOUR_S3_BUCKET_HERE>",
"prefix": "<BUCKET_PATH_TO_DATA>",
"default_regex": {
"pattern": "(.*)\\.csv",
"group_names": ["data_asset_name"],
},
},
},
}
# Please note this override is only to provide good UX for docs and tests.
# In normal usage you'd set your path directly in the yaml above.
datasource_config["data_connectors"]["default_inferred_data_connector_name"][
"bucket"
] = "superconductive-public"
datasource_config["data_connectors"]["default_inferred_data_connector_name"][
"prefix"
] = "data/taxi_yellow_trip_data_samples/"
context.test_yaml_config(yaml.dump(datasource_config))
context.add_datasource(**datasource_config)
# Here is a RuntimeBatchRequest using a path to a single CSV file
batch_request = RuntimeBatchRequest(
datasource_name="my_s3_datasource",
data_connector_name="default_runtime_data_connector_name",
data_asset_name="<YOUR_MEANGINGFUL_NAME>", # this can be anything that identifies this data_asset for you
runtime_parameters={"path": "<PATH_TO_YOUR_DATA_HERE>"}, # Add your S3 path here.
batch_identifiers={"default_identifier_name": "default_identifier"},
)
# Please note this override is only to provide good UX for docs and tests.
# In normal usage you'd set your path directly in the BatchRequest above.
batch_request.runtime_parameters[
"path"
] = "s3a://superconductive-public/data/taxi_yellow_trip_data_samples/yellow_trip_data_sample_2019-01.csv"
context.create_expectation_suite(
expectation_suite_name="test_suite", overwrite_existing=True
)
validator = context.get_validator(
batch_request=batch_request, expectation_suite_name="test_suite"
)
print(validator.head())
# NOTE: The following code is only for testing and can be ignored by users.
assert isinstance(validator, ge.validator.validator.Validator)
# Here is a BatchRequest naming a data_asset
batch_request = BatchRequest(
datasource_name="my_s3_datasource",
data_connector_name="default_inferred_data_connector_name",
data_asset_name="<YOUR_DATA_ASSET_NAME>",
batch_spec_passthrough={"reader_method": "csv", "reader_options": {"header": True}},
)
# Please note this override is only to provide good UX for docs and tests.
# In normal usage you'd set your data asset name directly in the BatchRequest above.
batch_request.data_asset_name = (
"data/taxi_yellow_trip_data_samples/yellow_trip_data_sample_2019-01"
)
context.create_expectation_suite(
expectation_suite_name="test_suite", overwrite_existing=True
)
validator = context.get_validator(
batch_request=batch_request, expectation_suite_name="test_suite"
)
print(validator.head())
# NOTE: The following code is only for testing and can be ignored by users.
assert isinstance(validator, ge.validator.validator.Validator)
assert [ds["name"] for ds in context.list_datasources()] == ["my_s3_datasource"]
assert set(
context.get_available_data_asset_names()["my_s3_datasource"][
"default_inferred_data_connector_name"
]
) == {
"data/taxi_yellow_trip_data_samples/yellow_trip_data_sample_2019-01",
"data/taxi_yellow_trip_data_samples/yellow_trip_data_sample_2019-02",
"data/taxi_yellow_trip_data_samples/yellow_trip_data_sample_2019-03",
}
batch_list: List[Batch] = context.get_batch_list(batch_request=batch_request)
assert len(batch_list) == 1
batch: Batch = batch_list[0]
assert batch.data.dataframe.count() == 10000
avg_line_length: 38.709677 | max_line_length: 110 | alphanum_fraction: 0.767083

hexsha: 7ece2d809cd2e9e3910e49b83689517e8d2aba54 | size: 2,443 | ext: py | lang: Python
max_stars_repo_path: app/core/tests/test_models.py | max_stars_repo_name: 2006GTO/recipes-app-api | max_stars_repo_head_hexsha: 8bc8be28342f650e634350a5460b91521c3560bb
max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: app/core/tests/test_models.py | max_issues_repo_name: 2006GTO/recipes-app-api | max_issues_repo_head_hexsha: 8bc8be28342f650e634350a5460b91521c3560bb
max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: app/core/tests/test_models.py | max_forks_repo_name: 2006GTO/recipes-app-api | max_forks_repo_head_hexsha: 8bc8be28342f650e634350a5460b91521c3560bb
max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
from django.test import TestCase
from django.contrib.auth import get_user_model
from core import models
from unittest.mock import patch
def sample_user(email='test@gmail.com', password='qazqazqaz'):
return get_user_model().objects.create_user(email, password)
class ModelTests(TestCase):
    def test_create_user_with_email_successful(self):
        # test creating a new user with an email is successful
email = "test@test.com"
password = "guest"
user = get_user_model().objects.create_user(
email=email,
password=password
)
self.assertEqual(user.email, email)
        self.assertTrue(user.check_password(password))
def test_new_user_email_normalized(self):
email = "test@TE.com"
user = get_user_model().objects.create_user(email, 'ew')
self.assertEqual(user.email, email.lower())
def test_new_user_invalid_email(self):
with self.assertRaises(ValueError):
get_user_model().objects.create_user(None, 'test')
def test_create_new_superuser(self):
user = get_user_model().objects.create_superuser(
'q@q.com',
'red'
)
        self.assertTrue(user.is_superuser)
self.assertTrue(user.is_staff)
def test_tag_str(self):
tag = models.Tag.objects.create(
user=sample_user(),
name='Meater'
)
self.assertEqual(str(tag), tag.name)
def test_ingredient_str(self):
ingredient = models.Ingredient.objects.create(
user=sample_user(),
name='Melon'
)
self.assertEqual(str(ingredient), ingredient.name)
    def test_recipe_str(self):
recipe = models.Recipe.objects.create(
user=sample_user(),
title='Prime Rib',
time_minutes=75,
price=80.00
)
self.assertEqual(str(recipe), recipe.title)
@patch('uuid.uuid4')
def test_recipe_file_name_uuid(self, mock_uuid):
uuid = 'test-uuid'
mock_uuid.return_value = uuid
file_path = models.recipe_image_file_path(None, 'myimage.jpg')
exp_path = f'uploads/recipe/{uuid}.jpg'
self.assertEqual(file_path, exp_path)
avg_line_length: 31.320513 | max_line_length: 71 | alphanum_fraction: 0.585346

hexsha: f0ec1d8ffb55033e51611598a53538c04f685e44 | size: 2,674 | ext: py | lang: Python
max_stars_repo_path: duke-cs671-fall21-coupon-recommendation/outputs/rules/RF/17_features/numtrees_20/rule_13.py | max_stars_repo_name: apcarrik/kaggle | max_stars_repo_head_hexsha: 6e2d4db58017323e7ba5510bcc2598e01a4ee7bf
max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: duke-cs671-fall21-coupon-recommendation/outputs/rules/RF/17_features/numtrees_20/rule_13.py | max_issues_repo_name: apcarrik/kaggle | max_issues_repo_head_hexsha: 6e2d4db58017323e7ba5510bcc2598e01a4ee7bf
max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: duke-cs671-fall21-coupon-recommendation/outputs/rules/RF/17_features/numtrees_20/rule_13.py | max_forks_repo_name: apcarrik/kaggle | max_forks_repo_head_hexsha: 6e2d4db58017323e7ba5510bcc2598e01a4ee7bf
max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
def findDecision(obj): #obj[0]: Passanger, obj[1]: Weather, obj[2]: Time, obj[3]: Coupon, obj[4]: Coupon_validity, obj[5]: Gender, obj[6]: Age, obj[7]: Maritalstatus, obj[8]: Children, obj[9]: Education, obj[10]: Occupation, obj[11]: Income, obj[12]: Bar, obj[13]: Coffeehouse, obj[14]: Restaurant20to50, obj[15]: Direction_same, obj[16]: Distance
# {"feature": "Age", "instances": 51, "metric_value": 0.9997, "depth": 1}
if obj[6]<=3:
# {"feature": "Coffeehouse", "instances": 30, "metric_value": 0.9183, "depth": 2}
if obj[13]>0.0:
# {"feature": "Bar", "instances": 23, "metric_value": 0.9877, "depth": 3}
if obj[12]<=1.0:
# {"feature": "Occupation", "instances": 13, "metric_value": 0.9612, "depth": 4}
if obj[10]<=6:
# {"feature": "Time", "instances": 9, "metric_value": 0.9911, "depth": 5}
if obj[2]>0:
# {"feature": "Education", "instances": 7, "metric_value": 0.9852, "depth": 6}
if obj[9]<=0:
# {"feature": "Passanger", "instances": 4, "metric_value": 0.8113, "depth": 7}
if obj[0]>0:
return 'False'
elif obj[0]<=0:
return 'True'
else: return 'True'
elif obj[9]>0:
return 'True'
else: return 'True'
elif obj[2]<=0:
return 'False'
else: return 'False'
elif obj[10]>6:
return 'True'
else: return 'True'
elif obj[12]>1.0:
# {"feature": "Coupon", "instances": 10, "metric_value": 0.7219, "depth": 4}
if obj[3]>0:
return 'False'
elif obj[3]<=0:
# {"feature": "Children", "instances": 3, "metric_value": 0.9183, "depth": 5}
if obj[8]<=0:
return 'True'
elif obj[8]>0:
return 'False'
else: return 'False'
else: return 'True'
else: return 'False'
elif obj[13]<=0.0:
return 'False'
else: return 'False'
elif obj[6]>3:
# {"feature": "Coffeehouse", "instances": 21, "metric_value": 0.8631, "depth": 2}
if obj[13]<=1.0:
# {"feature": "Time", "instances": 12, "metric_value": 1.0, "depth": 3}
if obj[2]>0:
# {"feature": "Bar", "instances": 7, "metric_value": 0.8631, "depth": 4}
if obj[12]<=0.0:
return 'False'
elif obj[12]>0.0:
# {"feature": "Coupon_validity", "instances": 3, "metric_value": 0.9183, "depth": 5}
if obj[4]>0:
return 'True'
elif obj[4]<=0:
return 'False'
else: return 'False'
else: return 'True'
elif obj[2]<=0:
# {"feature": "Coupon", "instances": 5, "metric_value": 0.7219, "depth": 4}
if obj[3]>0:
return 'True'
elif obj[3]<=0:
return 'False'
else: return 'False'
else: return 'True'
elif obj[13]>1.0:
return 'True'
else: return 'True'
else: return 'True'
avg_line_length: 36.630137 | max_line_length: 347 | alphanum_fraction: 0.564323

hexsha: ff27bfd263ab6aeebf7a97cecfee1f1f0419192c | size: 804 | ext: py | lang: Python
max_stars_repo_path: moosetash/handlers.py | max_stars_repo_name: michaelrccurtis/moosetash | max_stars_repo_head_hexsha: bf26152409b991e116d0658e1836ac1b4b521f7d
max_stars_repo_licenses: ["MIT"] | max_stars_count: 2 | max_stars_repo_stars_event_min_datetime: 2022-03-09T11:22:45.000Z | max_stars_repo_stars_event_max_datetime: 2022-03-10T03:11:55.000Z
max_issues_repo_path: moosetash/handlers.py | max_issues_repo_name: michaelrccurtis/moosetash | max_issues_repo_head_hexsha: bf26152409b991e116d0658e1836ac1b4b521f7d
max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: moosetash/handlers.py | max_forks_repo_name: michaelrccurtis/moosetash | max_forks_repo_head_hexsha: bf26152409b991e116d0658e1836ac1b4b521f7d
max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
from typing import Any
from .exceptions import MissingPartial, MissingVariable
def missing_variable_default(variable_name: str, variable_tag: str) -> str:
return ''
def missing_variable_raise(variable_name: str, variable_tag: str):
raise MissingVariable(variable_name)
def missing_variable_keep(variable_name: str, variable_tag: str) -> str:
return variable_tag
def missing_partial_default(partial_name: str, partial_tag: str) -> str:
return ''
def missing_partial_raise(partial_name: str, partial_tag: str) -> str:
raise MissingPartial(partial_name)
def missing_partial_keep(partial_name: str, partial_tag: str) -> str:
return partial_tag
def default_serializer(value: Any) -> str:
"""By default, serialize variables as by stringifying"""
return str(value)
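# Sketch of how these handlers are meant to be plugged in; ``render`` and its
# keyword name below are illustrative, not part of this module. The handlers
# above are plain callables invoked when a variable or partial is missing:
#
#   render("Hello {{name}}", {}, missing_variable=missing_variable_keep)
#   # -> "Hello {{name}}"   (the unresolved tag is kept verbatim)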
avg_line_length: 25.125 | max_line_length: 75 | alphanum_fraction: 0.761194

hexsha: db60e040c4e38d6124380de0e59fd23a3a990009 | size: 3,707 | ext: py | lang: Python
max_stars_repo_path: tfx/tools/cli/container_builder/builder.py | max_stars_repo_name: Anon-Artist/tfx | max_stars_repo_head_hexsha: 2692c9ab437d76b5d9517996bfe2596862e0791d
max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: 3 | max_stars_repo_stars_event_min_datetime: 2020-07-20T18:37:16.000Z | max_stars_repo_stars_event_max_datetime: 2021-11-17T11:24:27.000Z
max_issues_repo_path: tfx/tools/cli/container_builder/builder.py | max_issues_repo_name: Anon-Artist/tfx | max_issues_repo_head_hexsha: 2692c9ab437d76b5d9517996bfe2596862e0791d
max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: 2 | max_issues_repo_issues_event_min_datetime: 2020-08-11T00:19:14.000Z | max_issues_repo_issues_event_max_datetime: 2020-08-26T20:10:31.000Z
max_forks_repo_path: tfx/tools/cli/container_builder/builder.py | max_forks_repo_name: Anon-Artist/tfx | max_forks_repo_head_hexsha: 2692c9ab437d76b5d9517996bfe2596862e0791d
max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: 1 | max_forks_repo_forks_event_min_datetime: 2021-01-28T13:41:51.000Z | max_forks_repo_forks_event_max_datetime: 2021-01-28T13:41:51.000Z
content:
# Lint as: python2, python3
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""ContainerBuilder builds the container image."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from typing import Optional, Text
import click
from tfx.tools.cli.container_builder import buildspec
from tfx.tools.cli.container_builder import labels
from tfx.tools.cli.container_builder.dockerfile import Dockerfile
from tfx.tools.cli.container_builder.skaffold_cli import SkaffoldCli
# TODO(b/142357382): add e2e tests.
class ContainerBuilder(object):
"""Build containers.
  ContainerBuilder prepares the build files and runs Skaffold to build the
containers.
Attributes:
_buildspec: BuildSpec instance.
_skaffold_cmd: Skaffold command.
"""
def __init__(self,
target_image: Optional[Text] = None,
base_image: Optional[Text] = None,
skaffold_cmd: Optional[Text] = None,
buildspec_filename: Optional[Text] = None,
dockerfile_name: Optional[Text] = None,
setup_py_filename: Optional[Text] = None):
"""Initialization.
Args:
target_image: the target image path to be built.
base_image: the image path to use as the base image.
skaffold_cmd: skaffold command.
buildspec_filename: the buildspec file path that is accessible to the
current execution environment. It could be either absolute path or
relative path.
dockerfile_name: the dockerfile name, which is stored in the workspace
directory. The workspace directory is specified in the build spec and
the default workspace directory is '.'.
setup_py_filename: the setup.py file name, which is used to build a
python package for the workspace directory. If not specified, the
whole directory is copied and PYTHONPATH is configured.
"""
self._skaffold_cmd = skaffold_cmd or labels.SKAFFOLD_COMMAND
buildspec_filename = buildspec_filename or labels.BUILD_SPEC_FILENAME
dockerfile_name = dockerfile_name or labels.DOCKERFILE_NAME
if os.path.exists(buildspec_filename):
self._buildspec = buildspec.BuildSpec(filename=buildspec_filename)
if target_image is not None:
click.echo(
'Target image %s is not used. If the build spec is '
'provided, update the target image in the build spec '
'file %s.' % (target_image, buildspec_filename))
else:
self._buildspec = buildspec.BuildSpec.load_default(
filename=buildspec_filename,
target_image=target_image,
dockerfile_name=dockerfile_name)
Dockerfile(
filename=os.path.join(self._buildspec.build_context, dockerfile_name),
setup_py_filename=setup_py_filename,
base_image=base_image)
def build(self):
"""Build the container and return the built image path with SHA."""
skaffold_cli = SkaffoldCli(cmd=self._skaffold_cmd)
image_sha = skaffold_cli.build(self._buildspec)
target_image = self._buildspec.target_image
return target_image + '@' + image_sha
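# Example usage (a sketch for illustration only; the image names below are
# placeholders, not values from this module):
#   builder = ContainerBuilder(
#       target_image='gcr.io/my-project/my-pipeline',
#       base_image='tensorflow/tfx:latest')
#   image_with_sha = builder.build()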
avg_line_length: 39.021053 | max_line_length: 78 | alphanum_fraction: 0.724845

hexsha: 52e62603ef4b935ed48ba5e1af55b5b1e04418e5 | size: 184 | ext: py | lang: Python
max_stars_repo_path: models/mtg/__init__.py | max_stars_repo_name: shinji19/sealed-deck-generator | max_stars_repo_head_hexsha: 8321d023fbef3a4b58c37fe36ac9b225b22bb4d1
max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: models/mtg/__init__.py | max_issues_repo_name: shinji19/sealed-deck-generator | max_issues_repo_head_hexsha: 8321d023fbef3a4b58c37fe36ac9b225b22bb4d1
max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: models/mtg/__init__.py | max_forks_repo_name: shinji19/sealed-deck-generator | max_forks_repo_head_hexsha: 8321d023fbef3a4b58c37fe36ac9b225b22bb4d1
max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
from .card_score_master_repository import CardScoreMasterRepository
from .card_master import CardMaster
from .card_master_repository import CardMasterRepository
from .deck import Deck
avg_line_length: 36.8 | max_line_length: 67 | alphanum_fraction: 0.891304

hexsha: 673e8009a489382d3986c2e60dc554e6ebf87d09 | size: 2,710 | ext: py | lang: Python
max_stars_repo_path: tests/buffer/test_prioritized_buffer.py | max_stars_repo_name: krishanrana/rl_algorithms | max_stars_repo_head_hexsha: c12fe447a70f2f99f37f6c1157907755d38fde81
max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: tests/buffer/test_prioritized_buffer.py | max_issues_repo_name: krishanrana/rl_algorithms | max_issues_repo_head_hexsha: c12fe447a70f2f99f37f6c1157907755d38fde81
max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: tests/buffer/test_prioritized_buffer.py | max_forks_repo_name: krishanrana/rl_algorithms | max_forks_repo_head_hexsha: c12fe447a70f2f99f37f6c1157907755d38fde81
max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
from typing import List, Tuple
import numpy as np
from scipy.stats import ks_2samp
from rl_algorithms.common.buffer.replay_buffer import ReplayBuffer
from rl_algorithms.common.buffer.wrapper import PrioritizedBufferWrapper
def generate_prioritized_buffer(
buffer_length: int, batch_size: int, idx_lst=None, prior_lst=None
) -> Tuple[PrioritizedBufferWrapper, List]:
"""Generate Prioritized Replay Buffer with random Prior."""
buffer = ReplayBuffer(max_len=buffer_length, batch_size=batch_size)
prioritized_buffer = PrioritizedBufferWrapper(buffer)
priority = np.random.randint(10, size=buffer_length)
for i, j in enumerate(priority):
prioritized_buffer.sum_tree[i] = j
if idx_lst:
for i, j in list(zip(idx_lst, prior_lst)):
priority[i] = j
prioritized_buffer.sum_tree[i] = j
prop_lst = [i / sum(priority) for i in priority]
prioritized_buffer.buffer.length = buffer_length
return prioritized_buffer, prop_lst
def sample_dummy(prioritized_buffer: PrioritizedBufferWrapper, times: int) -> List:
"""Sample from prioritized buffer and Return indices."""
assert isinstance(prioritized_buffer, PrioritizedBufferWrapper)
sampled_lst = [0] * prioritized_buffer.buffer.max_len
for _ in range(times):
indices = prioritized_buffer._sample_proportional(
prioritized_buffer.buffer.batch_size
)
for idx in indices:
sampled_lst[idx] += 1 / (times * prioritized_buffer.buffer.batch_size)
return sampled_lst
def check_prioritized(prop_lst: List, sampled_lst: List) -> bool:
"""Check two input lists have same distribution by kstest.
Reference:
https://en.wikipedia.org/wiki/Kolmogorov%E2%80%93Smirnov_test
"""
res = ks_2samp(prop_lst, sampled_lst)
return res[1] >= 0.05
def test_prioritized(buffer_length=32, batch_size=4):
"""Test whether transitions are prioritized sampled from replay buffer."""
n_repeat = 1000
idx_lst = [0, 1, 2, 3]
prior_lst = [100, 10, 1, 1]
    # generate a prioritized buffer; returns the buffer and its expected sampling proportions
buffer, prop = generate_prioritized_buffer(
buffer_length, batch_size, idx_lst, prior_lst
)
assert isinstance(buffer, PrioritizedBufferWrapper)
sampled_lst = [0] * buffer.buffer.max_len
# sample index from buffer
for _ in range(n_repeat):
indices = buffer._sample_proportional(buffer.buffer.batch_size)
for idx in indices:
sampled_lst[idx] += 1 / (n_repeat * buffer.buffer.batch_size)
assert check_prioritized(prop, sampled_lst), "Two distributions are different."
if __name__ == "__main__":
test_prioritized()
| 34.74359
| 83
| 0.715498
|
db8ae692652dfade0664459977d3318d1ffe00fd
| 6,211
|
py
|
Python
|
core/self6dpp/models/model_utils_deepim.py
|
THU-DA-6D-Pose-Group/self6dpp
|
c267cfa55e440e212136a5e9940598720fa21d16
|
[
"Apache-2.0"
] | 33
|
2021-12-15T07:11:47.000Z
|
2022-03-29T08:58:32.000Z
|
core/self6dpp/models/model_utils_deepim.py
|
THU-DA-6D-Pose-Group/self6dpp
|
c267cfa55e440e212136a5e9940598720fa21d16
|
[
"Apache-2.0"
] | 3
|
2021-12-15T11:39:54.000Z
|
2022-03-29T07:24:23.000Z
|
core/self6dpp/models/model_utils_deepim.py
|
THU-DA-6D-Pose-Group/self6dpp
|
c267cfa55e440e212136a5e9940598720fa21d16
|
[
"Apache-2.0"
] | null | null | null |
import torch
import copy
import numpy as np
from lib.pysixd.pose_error import re, te
from core.utils.pose_utils import quat2mat_torch
from core.utils.rot_reps import rot6d_to_mat_batch
from core.utils import lie_algebra, quaternion_lf
from .net_factory import HEADS
def get_rot_dim(rot_type):
if rot_type in ["allo_quat", "ego_quat"]:
rot_dim = 4
elif rot_type in [
"allo_log_quat",
"ego_log_quat",
"allo_lie_vec",
"ego_lie_vec",
]:
rot_dim = 3
elif rot_type in ["allo_rot6d", "ego_rot6d"]:
rot_dim = 6
else:
raise ValueError(f"Unknown rot_type: {rot_type}")
return rot_dim
def get_rot_mat(rot, rot_type):
if rot_type in ["ego_quat", "allo_quat"]:
rot_m = quat2mat_torch(rot)
elif rot_type in ["ego_log_quat", "allo_log_quat"]:
# from latentfusion (lf)
rot_m = quat2mat_torch(quaternion_lf.qexp(rot))
elif rot_type in ["ego_lie_vec", "allo_lie_vec"]:
rot_m = lie_algebra.lie_vec_to_rot(rot)
elif rot_type in ["ego_rot6d", "allo_rot6d"]:
rot_m = rot6d_to_mat_batch(rot)
else:
raise ValueError(f"Wrong pred_rot type: {rot_type}")
return rot_m
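# Quick reference for the rotation parameterizations handled above
# (a summary of get_rot_dim / get_rot_mat, not new behavior):
#   get_rot_dim("ego_quat")     -> 4  (quaternion, converted via quat2mat_torch)
#   get_rot_dim("allo_lie_vec") -> 3  (Lie-algebra vector, via lie_vec_to_rot)
#   get_rot_dim("ego_rot6d")    -> 6  (continuous 6D rep, via rot6d_to_mat_batch)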
def get_mask_dim(mask_loss_type):
if mask_loss_type in ["L1", "BCE", "RW_BCE", "dice"]:
mask_out_dim = 1
elif mask_loss_type in ["CE"]:
mask_out_dim = 2
else:
raise NotImplementedError(f"unknown mask loss type: {mask_loss_type}")
return mask_out_dim
def get_pose_head(cfg):
net_cfg = cfg.MODEL.DEEPIM
pose_head_cfg = net_cfg.POSE_HEAD
params_lr_list = []
rot_type = pose_head_cfg.ROT_TYPE
rot_dim = get_rot_dim(rot_type)
pose_num_classes = net_cfg.NUM_CLASSES if pose_head_cfg.CLASS_AWARE else 1
pose_head_init_cfg = copy.deepcopy(pose_head_cfg.INIT_CFG)
pose_head_type = pose_head_init_cfg.pop("type")
pose_head_init_cfg.update(rot_dim=rot_dim, num_classes=pose_num_classes)
pose_head = HEADS[pose_head_type](**pose_head_init_cfg)
if pose_head_cfg.FREEZE:
for param in pose_head.parameters():
with torch.no_grad():
param.requires_grad = False
else:
params_lr_list.append(
{
"params": filter(lambda p: p.requires_grad, pose_head.parameters()),
"lr": float(cfg.SOLVER.BASE_LR) * pose_head_cfg.LR_MULT,
}
)
return pose_head, params_lr_list
def get_mask_head(cfg, is_test=False):
net_cfg = cfg.MODEL.DEEPIM
mask_head_cfg = net_cfg.MASK_HEAD
params_lr_list = []
if mask_head_cfg.ENABLED:
if is_test and not cfg.TEST.OUTPUT_MASK:
mask_head = None
else:
mask_dim = get_mask_dim(net_cfg.LOSS_CFG.MASK_LOSS_TYPE)
mask_num_classes = net_cfg.NUM_CLASSES if mask_head_cfg.CLASS_AWARE else 1
mask_head_init_cfg = copy.deepcopy(mask_head_cfg.INIT_CFG)
mask_head_type = mask_head_init_cfg.pop("type")
mask_head_init_cfg.update(num_classes=mask_num_classes, out_dim=mask_dim)
mask_head = HEADS[mask_head_type](**mask_head_init_cfg)
if mask_head_cfg.FREEZE:
for param in mask_head.parameters():
with torch.no_grad():
param.requires_grad = False
else:
params_lr_list.append(
{
"params": filter(lambda p: p.requires_grad, mask_head.parameters()),
"lr": float(cfg.SOLVER.BASE_LR) * mask_head_cfg.LR_MULT,
}
)
else:
mask_head = None
return mask_head, params_lr_list
def get_flow_head(cfg, is_test=False):
net_cfg = cfg.MODEL.DEEPIM
flow_head_cfg = net_cfg.FLOW_HEAD
params_lr_list = []
if flow_head_cfg.ENABLED:
if is_test and not cfg.TEST.OUTPUT_FLOW:
flow_head = None
else:
flow_num_classes = net_cfg.NUM_CLASSES if flow_head_cfg.CLASS_AWARE else 1
flow_head_init_cfg = copy.deepcopy(flow_head_cfg.INIT_CFG)
flow_head_type = flow_head_init_cfg.pop("type")
flow_head_init_cfg.update(num_classes=flow_num_classes)
flow_head = HEADS[flow_head_type](**flow_head_init_cfg)
if flow_head_cfg.FREEZE:
for param in flow_head.parameters():
with torch.no_grad():
param.requires_grad = False
else:
params_lr_list.append(
{
"params": filter(lambda p: p.requires_grad, flow_head.parameters()),
"lr": float(cfg.SOLVER.BASE_LR) * flow_head_cfg.LR_MULT,
}
)
else:
flow_head = None
return flow_head, params_lr_list
def get_mask_prob(pred_mask, mask_loss_type):
# (b,c,h,w)
# output: (b, 1, h, w)
bs, c, h, w = pred_mask.shape
if mask_loss_type == "L1":
assert c == 1, c
mask_max = torch.max(pred_mask.view(bs, -1), dim=-1)[0].view(bs, 1, 1, 1)
mask_min = torch.min(pred_mask.view(bs, -1), dim=-1)[0].view(bs, 1, 1, 1)
# [0, 1]
        mask_prob = (pred_mask - mask_min) / (mask_max - mask_min)  # optionally add 1e-6 to the denominator to guard against mask_max == mask_min
elif mask_loss_type in ["BCE", "RW_BCE", "dice"]:
assert c == 1, c
mask_prob = torch.sigmoid(pred_mask)
elif mask_loss_type == "CE":
        mask_prob = torch.softmax(pred_mask, dim=1)[:, 1:2, :, :]  # softmax keeps the input shape; slice out the foreground channel
else:
raise NotImplementedError(f"unknown mask loss type: {mask_loss_type}")
return mask_prob
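# Minimal usage sketch for get_mask_prob (shapes illustrative):
#   pred = torch.randn(2, 1, 8, 8)          # single-channel logits
#   prob = get_mask_prob(pred, "BCE")       # sigmoid -> values in [0, 1]
#   pred2 = torch.randn(2, 2, 8, 8)         # two-class logits
#   prob2 = get_mask_prob(pred2, "CE")      # softmax, keep foreground channel
# Both calls return a (b, 1, h, w) probability map.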
def compute_mean_re_te(pred_transes, pred_rots, gt_transes, gt_rots):
pred_transes = pred_transes.detach().cpu().numpy()
pred_rots = pred_rots.detach().cpu().numpy()
gt_transes = gt_transes.detach().cpu().numpy()
gt_rots = gt_rots.detach().cpu().numpy()
bs = pred_rots.shape[0]
R_errs = np.zeros((bs,), dtype=np.float32)
T_errs = np.zeros((bs,), dtype=np.float32)
for i in range(bs):
R_errs[i] = re(pred_rots[i], gt_rots[i])
T_errs[i] = te(pred_transes[i], gt_transes[i])
return R_errs.mean(), T_errs.mean()
| 34.698324
| 92
| 0.622927
|
dc3d35304c67ecd9b8ccf2437032a784d321a25d
| 2,415
|
py
|
Python
|
back/sequences/emails.py
|
kingzbauer/ChiefOnboarding
|
202092a367aade9e032286466e06399ea07f1908
|
[
"MIT"
] | null | null | null |
back/sequences/emails.py
|
kingzbauer/ChiefOnboarding
|
202092a367aade9e032286466e06399ea07f1908
|
[
"MIT"
] | null | null | null |
back/sequences/emails.py
|
kingzbauer/ChiefOnboarding
|
202092a367aade9e032286466e06399ea07f1908
|
[
"MIT"
] | null | null | null |
from django.core.mail import send_mail
from django.template import Template, Context
from django.template.loader import render_to_string
from django.conf import settings
from django.utils.translation import ugettext as _
from django.utils import translation
from organization.models import Organization
def send_sequence_message(new_hire, message):
org = Organization.object.get()
subject = _("Here is an update!")
for i in message:
i['text'] = new_hire.personalize(i['text'])
html_message = render_to_string("email/base.html",
{'org': org, 'content': message})
send_mail(subject, '', settings.DEFAULT_FROM_EMAIL, [new_hire.email], html_message=html_message)
def send_sequence_update_message(new_hire, message):
org = Organization.object.get()
subject = _("Here is an update!")
blocks = []
if len(message['to_do']) > 0:
text = _('Todo item')
if len(message['to_do']) > 1:
text = _('Todo items')
blocks.append({
"type": 'p',
"text": text
})
text = ""
for i in message['to_do']:
text += '- ' + new_hire.personalize(i.name) + '<br />'
blocks.append({
"type": 'block',
"text": text
})
if len(message['resources']) > 0:
text = _('Resource')
if len(message['resources']) > 1:
text = _('Resources')
blocks.append({
"type": 'p',
"text": text
})
text = ""
for i in message['resources']:
text += '- ' + new_hire.personalize(i.name) + '<br />'
blocks.append({
"type": 'block',
"text": text
})
if len(message['badges']) > 0:
text = _('Badge')
if len(message['badges']) > 1:
text = _('Badges')
blocks.append({
"type": 'p',
"text": text
})
text = ""
for i in message['badges']:
text += '- ' + new_hire.personalize(i.name) + '<br />'
blocks.append({
"type": 'block',
"text": text
})
html_message = render_to_string("email/base.html",
{'org': org, 'content': blocks})
message = ""
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, [new_hire.email], html_message=html_message)
| 33.082192
| 105
| 0.536232
|
067a6b49dbece689a80ee6d32b8a311e76c567e0
| 3,420
|
py
|
Python
|
python/londiste/compare.py
|
askoja/skytools
|
a3dd719119849350445f2ef08c736ff9b25aa006
|
[
"0BSD"
] | 1
|
2016-04-11T13:26:34.000Z
|
2016-04-11T13:26:34.000Z
|
python/londiste/compare.py
|
askoja/skytools
|
a3dd719119849350445f2ef08c736ff9b25aa006
|
[
"0BSD"
] | null | null | null |
python/londiste/compare.py
|
askoja/skytools
|
a3dd719119849350445f2ef08c736ff9b25aa006
|
[
"0BSD"
] | null | null | null |
#! /usr/bin/env python
"""Compares tables in replication set.
Does count(1) plus a checksum over the common columns on both sides.
"""
import sys, skytools
__all__ = ['Comparator']
from londiste.syncer import Syncer
class Comparator(Syncer):
"""Simple checker based in Syncer.
When tables are in sync runs simple SQL query on them.
"""
def process_sync(self, t1, t2, src_db, dst_db):
"""Actual comparision."""
src_tbl = t1.dest_table
dst_tbl = t2.dest_table
src_curs = src_db.cursor()
dst_curs = dst_db.cursor()
dst_where = t2.plugin.get_copy_condition(src_curs, dst_curs)
src_where = dst_where
self.log.info('Counting %s' % dst_tbl)
# get common cols
cols = self.calc_cols(src_curs, src_tbl, dst_curs, dst_tbl)
# get sane query
v1 = src_db.server_version
v2 = dst_db.server_version
if (v1 < 80400 or v2 < 80400) and v1 != v2:
q = "select count(1) as cnt, sum(('x'||substr(md5(_COLS_::text),1,16))::bit(64)::bigint) as chksum from only _TABLE_"
else:
q = "select count(1) as cnt, sum(hashtext(_COLS_::text)::bigint) as chksum from only _TABLE_"
q = self.cf.get('compare_sql', q)
q = q.replace("_COLS_", cols)
src_q = q.replace('_TABLE_', skytools.quote_fqident(src_tbl))
if src_where:
src_q = src_q + " WHERE " + src_where
dst_q = q.replace('_TABLE_', skytools.quote_fqident(dst_tbl))
if dst_where:
dst_q = dst_q + " WHERE " + dst_where
f = "%(cnt)d rows, checksum=%(chksum)s"
f = self.cf.get('compare_fmt', f)
self.log.debug("srcdb: " + src_q)
src_curs.execute(src_q)
src_row = src_curs.fetchone()
src_str = f % src_row
self.log.info("srcdb: %s" % src_str)
src_db.commit()
self.log.debug("dstdb: " + dst_q)
dst_curs.execute(dst_q)
dst_row = dst_curs.fetchone()
dst_str = f % dst_row
self.log.info("dstdb: %s" % dst_str)
dst_db.commit()
if src_str != dst_str:
self.log.warning("%s: Results do not match!" % dst_tbl)
return 1
return 0
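    # Illustrative final query after substitution (table and column names
    # assumed): with cols == '("id","name")' on a server where hashtext is
    # usable, src_q becomes roughly
    #   select count(1) as cnt,
    #          sum(hashtext(("id","name")::text)::bigint) as chksum
    #   from only mytable WHERE <copy condition>
    # so each side is compared with a single aggregate scan.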
def calc_cols(self, src_curs, src_tbl, dst_curs, dst_tbl):
cols1 = self.load_cols(src_curs, src_tbl)
cols2 = self.load_cols(dst_curs, dst_tbl)
qcols = []
for c in self.calc_common(cols1, cols2):
qcols.append(skytools.quote_ident(c))
return "(%s)" % ",".join(qcols)
def load_cols(self, curs, tbl):
schema, table = skytools.fq_name_parts(tbl)
q = "select column_name from information_schema.columns"\
" where table_schema = %s and table_name = %s"
curs.execute(q, [schema, table])
cols = []
for row in curs.fetchall():
cols.append(row[0])
return cols
def calc_common(self, cols1, cols2):
common = []
map2 = {}
for c in cols2:
map2[c] = 1
for c in cols1:
if c in map2:
common.append(c)
if len(common) == 0:
raise Exception("no common columns found")
if len(common) != len(cols1) or len(cols2) != len(cols1):
self.log.warning("Ignoring some columns")
return common
if __name__ == '__main__':
script = Comparator(sys.argv[1:])
script.start()
| 30.265487
| 129
| 0.577778
|
c3aa7add1744760ac6029f8b6a0bd55126b43854
| 59
|
py
|
Python
|
src/hello.py
|
bhattvishal/machinelearning-azure-amlws
|
3e200a69f40d20f7058f61be8101d65575832970
|
[
"MIT"
] | null | null | null |
src/hello.py
|
bhattvishal/machinelearning-azure-amlws
|
3e200a69f40d20f7058f61be8101d65575832970
|
[
"MIT"
] | null | null | null |
src/hello.py
|
bhattvishal/machinelearning-azure-amlws
|
3e200a69f40d20f7058f61be8101d65575832970
|
[
"MIT"
] | null | null | null |
print("Hello World, I am new to Azure Machine Learning...")
| 59
| 59
| 0.728814
|
a020ae4679fe329fee65c453fc00914ca3bcaaa6
| 401
|
py
|
Python
|
WEEKS/CD_Sata-Structures/_RESOURCES/python-prac/Overflow/_Learning/04b-complex-try-blocks.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | 5
|
2021-06-02T23:44:25.000Z
|
2021-12-27T16:21:57.000Z
|
WEEKS/CD_Sata-Structures/_RESOURCES/python-prac/Overflow/_Learning/04b-complex-try-blocks.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | 22
|
2021-05-31T01:33:25.000Z
|
2021-10-18T18:32:39.000Z
|
WEEKS/CD_Sata-Structures/_RESOURCES/python-prac/Overflow/_Learning/04b-complex-try-blocks.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | 3
|
2021-06-19T03:37:47.000Z
|
2021-08-31T00:49:51.000Z
|
# More complex try blocks
import sys
a = "False"
b = 6
c = 2
try:
print(len(a))
print(b / c)
print(a[47])
except TypeError:
print(f"{a} has no length")
except ZeroDivisionError as err:
print(f"Cannot divide by zero! Error: {err}")
except:
print(f"Uh oh, unknown error: {sys.exc_info()}")
else:
print("No errors! Nice!")
finally:
print("Thanks for using the program!")
| 19.095238
| 52
| 0.63591
|
ed41ba93cb1b2469c03d120042d83076fdc747ca
| 4,397
|
py
|
Python
|
ml/rl/test/gym/open_ai_gym_memory_pool.py
|
mikepsinn/Horizon
|
4ce123062320c0297b80135e0b63759c02bf5699
|
[
"BSD-3-Clause"
] | 1
|
2019-12-04T05:05:56.000Z
|
2019-12-04T05:05:56.000Z
|
ml/rl/test/gym/open_ai_gym_memory_pool.py
|
weiddeng/Horizon
|
0e7fe9d742c408e1b42803b42e104efbc56bae5b
|
[
"BSD-3-Clause"
] | null | null | null |
ml/rl/test/gym/open_ai_gym_memory_pool.py
|
weiddeng/Horizon
|
0e7fe9d742c408e1b42803b42e104efbc56bae5b
|
[
"BSD-3-Clause"
] | 1
|
2019-09-20T02:36:30.000Z
|
2019-09-20T02:36:30.000Z
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import numpy as np
import torch
from caffe2.python import workspace
from ml.rl.test.gym.open_ai_gym_environment import ModelType
from ml.rl.training.training_data_page import TrainingDataPage
class OpenAIGymMemoryPool:
def __init__(self, max_replay_memory_size):
"""
Creates an OpenAIGymMemoryPool object.
:param max_replay_memory_size: Upper bound on the number of transitions
to store in replay memory.
"""
self.replay_memory = []
self.max_replay_memory_size = max_replay_memory_size
self.memory_num = 0
self.skip_insert_until = self.max_replay_memory_size
def sample_memories(self, batch_size, model_type):
"""
Samples transitions from replay memory uniformly at random.
:param batch_size: Number of sampled transitions to return.
:param model_type: Model type (discrete, parametric).
"""
cols = [[], [], [], [], [], [], [], [], []]
indices = np.random.permutation(len(self.replay_memory))[:batch_size]
for idx in indices:
memory = self.replay_memory[idx]
for col, value in zip(cols, memory):
col.append(value)
possible_next_actions_lengths = torch.tensor(cols[7], dtype=torch.int32)
next_states = torch.tensor(cols[3], dtype=torch.float32)
if model_type == ModelType.PYTORCH_PARAMETRIC_DQN.value:
possible_next_actions = []
for pna_matrix in cols[6]:
for row in pna_matrix:
possible_next_actions.append(row)
tiled_states = torch.from_numpy(
np.repeat(
next_states.numpy(), possible_next_actions_lengths.numpy(), axis=0
)
)
possible_next_actions = torch.tensor(
possible_next_actions, dtype=torch.float32
)
possible_next_actions_state_concat = torch.cat(
(tiled_states, possible_next_actions), dim=1
)
else:
if cols[6] is None or cols[6][0] is None:
possible_next_actions = None
else:
possible_next_actions = torch.tensor(cols[6], dtype=torch.float32)
possible_next_actions_state_concat = None
return TrainingDataPage(
states=torch.tensor(cols[0], dtype=torch.float32),
actions=torch.tensor(cols[1], dtype=torch.float32),
propensities=None,
rewards=torch.tensor(cols[2], dtype=torch.float32).reshape(-1, 1),
next_states=torch.tensor(cols[3], dtype=torch.float32),
next_actions=torch.tensor(cols[4], dtype=torch.float32),
possible_next_actions=possible_next_actions,
not_terminals=torch.from_numpy(
np.logical_not(np.array(cols[5]), dtype=np.bool).astype(np.int32)
).reshape(-1, 1),
time_diffs=torch.tensor(cols[8], dtype=torch.int32).reshape(-1, 1),
possible_next_actions_lengths=possible_next_actions_lengths,
possible_next_actions_state_concat=possible_next_actions_state_concat,
)
def insert_into_memory(
self,
state,
action,
reward,
next_state,
next_action,
terminal,
possible_next_actions,
possible_next_actions_lengths,
time_diff,
):
"""
Inserts transition into replay memory in such a way that retrieving
transitions uniformly at random will be equivalent to reservoir sampling.
"""
item = (
state,
action,
reward,
next_state,
next_action,
terminal,
possible_next_actions,
possible_next_actions_lengths,
time_diff,
)
if self.memory_num < self.max_replay_memory_size:
self.replay_memory.append(item)
elif self.memory_num >= self.skip_insert_until:
p = float(self.max_replay_memory_size) / self.memory_num
self.skip_insert_until += np.random.geometric(p)
rand_index = np.random.randint(self.max_replay_memory_size)
self.replay_memory[rand_index] = item
self.memory_num += 1
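# Self-contained sketch of the skip-ahead reservoir sampling used above
# (standalone illustration, not called by this module). Once the pool is
# full, each new item replaces a random slot with probability
# max_size / items_seen; the geometric skip avoids a coin flip per insert.
def _reservoir_sample_sketch(stream, max_size):
    pool, seen, skip_until = [], 0, max_size
    for item in stream:
        if seen < max_size:
            pool.append(item)
        elif seen >= skip_until:
            skip_until += np.random.geometric(float(max_size) / seen)
            pool[np.random.randint(max_size)] = item
        seen += 1
    return pool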
| 37.581197
| 86
| 0.615192
|
247b8fd4f4b0bae76f2aef60f27684dc06edbd74
| 911
|
py
|
Python
|
queryUser.py
|
abgcsc/CDANs
|
7113ed836df1369895054b11c121071faa8392af
|
[
"MIT"
] | 7
|
2018-03-04T17:12:13.000Z
|
2022-02-08T03:35:40.000Z
|
queryUser.py
|
abgcsc/CDANs
|
7113ed836df1369895054b11c121071faa8392af
|
[
"MIT"
] | null | null | null |
queryUser.py
|
abgcsc/CDANs
|
7113ed836df1369895054b11c121071faa8392af
|
[
"MIT"
] | 4
|
2018-02-12T22:37:22.000Z
|
2018-12-20T13:16:32.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 05 11:09:29 2016
@author: Drew
"""
import sys
def queryUser(question):
"""
Ask a simple yes/no question of the user.
"""
valid = {"yes": True, "y": True, "ye": True,
"no": False, "n": False}
while True:
try:
sys.stdout.write(question + '[y/n]')
choice = raw_input().lower()
if choice == '':
return valid['y']
elif choice in valid:
return valid[choice]
else:
sys.stdout.write("Please respond with 'yes' or 'no' "
"(or 'y' or 'n').\n")
except KeyboardInterrupt:
            # turns out this doesn't fix the problem with the IPython console:
            # the console freezes if Ctrl-C is pressed during raw_input()
sys.stdout.write("'No' answer assumed.")
return False
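# Example usage (caller is hypothetical; note raw_input() makes this Python 2):
#   if queryUser("Overwrite existing results? "):
#       do_overwrite()
# An empty answer defaults to 'yes'; Ctrl-C is treated as a 'no' answer.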
| 28.46875
| 73
| 0.493963
|
90998e8d9c76b5e243b279ed97ea6f53b6eaf74d
| 303
|
py
|
Python
|
utils/usys.S.py
|
skyzh/core-os-riscv
|
6471afb60dd7be678b6a2fde56f935b536746b4d
|
[
"MIT"
] | 239
|
2020-01-17T19:27:05.000Z
|
2022-03-28T17:05:58.000Z
|
utils/usys.S.py
|
riscv-ripper/core-os-riscv
|
6471afb60dd7be678b6a2fde56f935b536746b4d
|
[
"MIT"
] | 10
|
2020-02-20T06:02:32.000Z
|
2021-06-15T10:33:27.000Z
|
utils/usys.S.py
|
riscv-ripper/core-os-riscv
|
6471afb60dd7be678b6a2fde56f935b536746b4d
|
[
"MIT"
] | 26
|
2020-01-18T09:25:31.000Z
|
2022-01-23T09:59:42.000Z
|
#!/usr/bin/env python3
### Copyright (c) 2020 Alex Chi
###
### This software is released under the MIT License.
### https://opensource.org/licenses/MIT
from syscall import syscalls
for (idx, syscall) in enumerate(syscalls):
print(f"""
.global __{syscall}
__{syscall}:
li a7, {idx}
ecall
ret""")
| 17.823529
| 52
| 0.686469
|
f6f41d3ebe8061406134deb406fa44e3164fd94a
| 698
|
py
|
Python
|
Methods/Slot/SlotW23/comp_height.py
|
Superomeg4/pyleecan
|
2b695b5f39e77475a07aa0ea89489fb0a9659337
|
[
"Apache-2.0"
] | 2
|
2020-06-29T13:48:37.000Z
|
2021-06-15T07:34:05.000Z
|
Methods/Slot/SlotW23/comp_height.py
|
Superomeg4/pyleecan
|
2b695b5f39e77475a07aa0ea89489fb0a9659337
|
[
"Apache-2.0"
] | null | null | null |
Methods/Slot/SlotW23/comp_height.py
|
Superomeg4/pyleecan
|
2b695b5f39e77475a07aa0ea89489fb0a9659337
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""@package Methods.Machine.SlotW23.comp_height
SlotW23 Computation of height method
@date Created on Mon Jun 29 15:28:55 2015
@copyright (C) 2014-2015 EOMYS ENGINEERING.
@author pierre_b
@todo check and unittest
"""
def comp_height(self):
"""Compute the height of the Slot.
Caution, the bottom of the Slot is an Arc
Parameters
----------
self : SlotW23
A SlotW23 object
Returns
-------
Htot: float
Height of the slot [m]
"""
Rbo = self.get_Rbo()
[Z1, Z2, Z3, Z4, Z5, Z6, Z7, Z8] = self._comp_point_coordinate()
if self.is_outwards():
return abs(Z4) - Rbo
else:
return Rbo - abs(Z4)
| 20.529412
| 68
| 0.611748
|
b55a43c12c8de29ff15f7b83d6a94849a0112bdf
| 5,498
|
py
|
Python
|
client/client_actions/osx/osx_test.py
|
nahidupa/grr
|
100a9d85ef2abb234e12e3ac2623caffb4116be7
|
[
"Apache-2.0"
] | 1
|
2016-02-13T15:40:20.000Z
|
2016-02-13T15:40:20.000Z
|
client/client_actions/osx/osx_test.py
|
nahidupa/grr
|
100a9d85ef2abb234e12e3ac2623caffb4116be7
|
[
"Apache-2.0"
] | 3
|
2020-02-11T22:29:15.000Z
|
2021-06-10T17:44:31.000Z
|
client/client_actions/osx/osx_test.py
|
nahidupa/grr
|
100a9d85ef2abb234e12e3ac2623caffb4116be7
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- mode: python; encoding: utf-8 -*-
# Copyright 2011 Google Inc. All Rights Reserved.
"""OSX tests."""
import glob
import os
import mock
import mox
from grr.lib import config_lib
from grr.lib import flags
from grr.lib import test_lib
from grr.test_data import osx_launchd as testdata
class OSXClientTests(test_lib.OSSpecificClientTests):
"""OSX client action tests."""
def setUp(self):
super(OSXClientTests, self).setUp()
modules = {
# Necessary to stop the import of client.osx.installers registering the
# actions.ActionPlugin.classes
"grr.client.osx": mock.MagicMock(),
"grr.client.osx.objc": mock.MagicMock(),
# Necessary to stop the import of client_actions.standard re-populating
# actions.ActionPlugin.classes
("grr.client.client_actions"
".standard"): mock.MagicMock(),
}
self.module_patcher = mock.patch.dict("sys.modules", modules)
self.module_patcher.start()
# pylint: disable=g-import-not-at-top
from grr.client.client_actions.osx import osx
# pylint: enable=g-import-not-at-top
self.osx = osx
def tearDown(self):
self.module_patcher.stop()
super(OSXClientTests, self).tearDown()
class OSXFilesystemTests(OSXClientTests):
"""Test reading osx file system."""
def testFileSystemEnumeration64Bit(self):
"""Ensure we can enumerate file systems successfully."""
path = os.path.join(self.base_path, "osx_fsdata")
results = self.osx.client_utils_osx.ParseFileSystemsStruct(
self.osx.client_utils_osx.StatFS64Struct, 7,
open(path).read())
self.assertEqual(len(results), 7)
self.assertEqual(results[0].f_fstypename, "hfs")
self.assertEqual(results[0].f_mntonname, "/")
self.assertEqual(results[0].f_mntfromname, "/dev/disk0s2")
self.assertEqual(results[2].f_fstypename, "autofs")
self.assertEqual(results[2].f_mntonname, "/auto")
self.assertEqual(results[2].f_mntfromname, "map auto.auto")
class OSXDriverTests(OSXClientTests):
"""Test reading osx file system."""
def setUp(self):
super(OSXDriverTests, self).setUp()
self.mox = mox.Mox()
self.mox.StubOutWithMock(self.osx.client_utils_osx, "InstallDriver")
path = os.path.join(config_lib.CONFIG["Test.srcdir"],
"grr/binaries/OSXPMem*.tar.gz")
self.drivers = glob.glob(path)
def tearDown(self):
super(OSXDriverTests, self).tearDown()
self.mox.UnsetStubs()
def testFindKext(self):
action = self.osx.InstallDriver("")
kext_path = os.path.join(self.temp_dir, "testing/something/blah.kext")
os.makedirs(kext_path)
self.assertEqual(action._FindKext(self.temp_dir), kext_path)
os.makedirs(os.path.join(self.temp_dir, "testing/no/kext/here"))
self.assertRaises(RuntimeError, action._FindKext,
os.path.join(self.temp_dir, "testing/no"))
class OSXEnumerateRunningServicesTest(OSXClientTests):
def setUp(self):
super(OSXEnumerateRunningServicesTest, self).setUp()
self.mox = mox.Mox()
self.action = self.osx.OSXEnumerateRunningServices(None)
self.mock_version = self.mox.CreateMock(
self.osx.client_utils_osx.OSXVersion)
self.mox.StubOutWithMock(self.action, "GetRunningLaunchDaemons")
self.mox.StubOutWithMock(self.action, "SendReply")
self.mox.StubOutWithMock(self.osx, "client_utils_osx")
def ValidResponseProto(self, proto):
self.assertTrue(proto.label)
return True
def ValidResponseProtoSingle(self, proto):
td = testdata.JOB[0]
self.assertEqual(proto.label, td["Label"])
self.assertEqual(proto.lastexitstatus,
td["LastExitStatus"].value)
self.assertEqual(proto.sessiontype,
td["LimitLoadToSessionType"])
self.assertEqual(len(proto.machservice),
len(td["MachServices"]))
self.assertEqual(proto.ondemand, td["OnDemand"].value)
self.assertEqual(len(proto.args),
len(td["ProgramArguments"]))
self.assertEqual(proto.timeout, td["TimeOut"].value)
return True
def testOSXEnumerateRunningServicesAll(self):
self.osx.client_utils_osx.OSXVersion().AndReturn(self.mock_version)
self.mock_version.VersionAsFloat().AndReturn(10.7)
self.action.GetRunningLaunchDaemons().AndReturn(testdata.JOBS)
num_results = len(testdata.JOBS) - testdata.FILTERED_COUNT
for _ in range(0, num_results):
self.action.SendReply(mox.Func(self.ValidResponseProto))
self.mox.ReplayAll()
self.action.Run(None)
self.mox.VerifyAll()
def testOSXEnumerateRunningServicesSingle(self):
self.osx.client_utils_osx.OSXVersion().AndReturn(self.mock_version)
self.mock_version.VersionAsFloat().AndReturn(10.7)
self.action.GetRunningLaunchDaemons().AndReturn(testdata.JOB)
self.action.SendReply(mox.Func(self.ValidResponseProtoSingle))
self.mox.ReplayAll()
self.action.Run(None)
self.mox.VerifyAll()
def testOSXEnumerateRunningServicesVersionError(self):
self.osx.client_utils_osx.OSXVersion().AndReturn(self.mock_version)
self.mock_version.VersionAsFloat().AndReturn(10.5)
self.mox.ReplayAll()
self.assertRaises(self.osx.UnsupportedOSVersionError, self.action.Run, None)
self.mox.VerifyAll()
def tearDown(self):
self.mox.UnsetStubs()
super(OSXEnumerateRunningServicesTest, self).tearDown()
def main(argv):
test_lib.main(argv)
if __name__ == "__main__":
flags.StartMain(main)
| 32.341176
| 80
| 0.709167
|
c5ab4a2a4858a4f2105a237ea6c9df626692dfff
| 518
|
py
|
Python
|
default_central/presets/sound/listport.py
|
vizicist/palette
|
01f3bbd8bc4fa4fffdbb96f8e7ed282b6e2274a6
|
[
"MIT"
] | 8
|
2020-11-14T22:50:51.000Z
|
2021-08-10T06:22:01.000Z
|
default_central/presets/sound/listport.py
|
vizicist/palette
|
01f3bbd8bc4fa4fffdbb96f8e7ed282b6e2274a6
|
[
"MIT"
] | 1
|
2021-02-24T20:16:06.000Z
|
2021-12-25T18:53:12.000Z
|
default_central/presets/sound/listport.py
|
vizicist/palette
|
01f3bbd8bc4fa4fffdbb96f8e7ed282b6e2274a6
|
[
"MIT"
] | null | null | null |
import json
import glob
import sys
import os
def listport(paramfile):
f = open(paramfile)
params = json.load(f)
f.close()
p = params["params"]
path = os.path.split(paramfile)
fname = path[len(path)-1]
print("%s \"%s\" %d" % (fname, p["regionport"]["value"], int(p["regionchannel"]["value"])))
if len(sys.argv) < 2:  # need a paramdir argument besides the script name
print("usage: %s [paramdir]" % sys.argv[0])
sys.exit(1)
paramdir = sys.argv[1]
files = glob.glob(os.path.join(paramdir,'*.json'))
for s in files:
listport(s)
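# Example invocation (paths and values illustrative):
#   python listport.py presets/sound
# prints one line per preset file, e.g.:
#   mypatch.json "midiout" 1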
| 20.72
| 95
| 0.610039
|
d8fda46d342559b558e7db0b3b1929b2e4c0f36e
| 2,276
|
py
|
Python
|
shared/templates/create_services_disabled.py
|
mpreisler/scap-security-guide
|
4e7df9a865f459d32914edbb83fae21cd4b89124
|
[
"BSD-3-Clause"
] | 1
|
2019-01-07T10:43:25.000Z
|
2019-01-07T10:43:25.000Z
|
shared/templates/create_services_disabled.py
|
mpreisler/scap-security-guide
|
4e7df9a865f459d32914edbb83fae21cd4b89124
|
[
"BSD-3-Clause"
] | null | null | null |
shared/templates/create_services_disabled.py
|
mpreisler/scap-security-guide
|
4e7df9a865f459d32914edbb83fae21cd4b89124
|
[
"BSD-3-Clause"
] | 1
|
2019-12-12T12:08:14.000Z
|
2019-12-12T12:08:14.000Z
|
#
# create_services_disabled.py
# automatically generate checks for disabled services
import sys
from template_common import FilesGenerator, UnknownTargetError
class ServiceDisabledGenerator(FilesGenerator):
def generate(self, target, serviceinfo):
try:
# get the items out of the list
servicename, packagename, daemonname = serviceinfo
if not packagename:
packagename = servicename
except ValueError as e:
print("\tEntry: %s\n" % serviceinfo)
print("\tError unpacking servicename, packagename, and daemonname: " + str(e))
sys.exit(1)
if not daemonname:
daemonname = servicename
if target == "bash":
self.file_from_template(
"./template_BASH_service_disabled",
{
"SERVICENAME": servicename,
"DAEMONNAME": daemonname
},
"./bash/service_{0}_disabled.sh", servicename
)
elif target == "ansible":
self.file_from_template(
"./template_ANSIBLE_service_disabled",
{
"SERVICENAME": servicename,
"DAEMONNAME": daemonname
},
"./ansible/service_{0}_disabled.yml", servicename
)
elif target == "puppet":
self.file_from_template(
"./template_PUPPET_service_disabled",
{
"SERVICENAME": servicename,
"DAEMONNAME": daemonname
},
"./puppet/service_{0}_disabled.yml", servicename
)
elif target == "oval":
self.file_from_template(
"./template_OVAL_service_disabled",
{
"SERVICENAME": servicename,
"DAEMONNAME": daemonname,
"PACKAGENAME": packagename
},
"./oval/service_{0}_disabled.xml", servicename
)
else:
raise UnknownTargetError(target)
def csv_format(self):
return("CSV should contains lines of the format: " +
"servicename,packagename")
| 32.056338
| 90
| 0.521529
|
c3b47ecdb5ca36a9de544a826408846f68913de0
| 6,515
|
py
|
Python
|
ckan/lib/mailer.py
|
ziveo/ckan
|
f4cfe5e28789df58b2bf7e73e5989ffda00e5c5c
|
[
"Apache-2.0"
] | 1
|
2022-02-14T20:25:34.000Z
|
2022-02-14T20:25:34.000Z
|
ckan/lib/mailer.py
|
ziveo/ckan
|
f4cfe5e28789df58b2bf7e73e5989ffda00e5c5c
|
[
"Apache-2.0"
] | null | null | null |
ckan/lib/mailer.py
|
ziveo/ckan
|
f4cfe5e28789df58b2bf7e73e5989ffda00e5c5c
|
[
"Apache-2.0"
] | 3
|
2020-01-02T10:32:37.000Z
|
2021-12-22T07:20:21.000Z
|
# encoding: utf-8
import codecs
import os
import smtplib
import socket
import logging
from time import time
from email.mime.text import MIMEText
from email.header import Header
from email import utils
from ckan.common import config
import ckan.common
from six import text_type
import ckan
import ckan.model as model
import ckan.lib.helpers as h
from ckan.lib.base import render_jinja2
from ckan.common import _
log = logging.getLogger(__name__)
class MailerException(Exception):
pass
def _mail_recipient(recipient_name, recipient_email,
sender_name, sender_url, subject,
body, headers=None):
if not headers:
headers = {}
mail_from = config.get('smtp.mail_from')
reply_to = config.get('smtp.reply_to')
msg = MIMEText(body.encode('utf-8'), 'plain', 'utf-8')
for k, v in headers.items():
if k in msg.keys():
msg.replace_header(k, v)
else:
msg.add_header(k, v)
subject = Header(subject.encode('utf-8'), 'utf-8')
msg['Subject'] = subject
msg['From'] = _("%s <%s>") % (sender_name, mail_from)
recipient = u"%s <%s>" % (recipient_name, recipient_email)
msg['To'] = Header(recipient, 'utf-8')
msg['Date'] = utils.formatdate(time())
msg['X-Mailer'] = "CKAN %s" % ckan.__version__
if reply_to and reply_to != '':
msg['Reply-to'] = reply_to
# Send the email using Python's smtplib.
smtp_connection = smtplib.SMTP()
if 'smtp.test_server' in config:
# If 'smtp.test_server' is configured we assume we're running tests,
# and don't use the smtp.server, starttls, user, password etc. options.
smtp_server = config['smtp.test_server']
smtp_starttls = False
smtp_user = None
smtp_password = None
else:
smtp_server = config.get('smtp.server', 'localhost')
smtp_starttls = ckan.common.asbool(
config.get('smtp.starttls'))
smtp_user = config.get('smtp.user')
smtp_password = config.get('smtp.password')
try:
smtp_connection.connect(smtp_server)
except socket.error as e:
log.exception(e)
raise MailerException('SMTP server could not be connected to: "%s" %s'
% (smtp_server, e))
try:
# Identify ourselves and prompt the server for supported features.
smtp_connection.ehlo()
# If 'smtp.starttls' is on in CKAN config, try to put the SMTP
# connection into TLS mode.
if smtp_starttls:
if smtp_connection.has_extn('STARTTLS'):
smtp_connection.starttls()
# Re-identify ourselves over TLS connection.
smtp_connection.ehlo()
else:
raise MailerException("SMTP server does not support STARTTLS")
# If 'smtp.user' is in CKAN config, try to login to SMTP server.
if smtp_user:
assert smtp_password, ("If smtp.user is configured then "
"smtp.password must be configured as well.")
smtp_connection.login(smtp_user, smtp_password)
smtp_connection.sendmail(mail_from, [recipient_email], msg.as_string())
log.info("Sent email to {0}".format(recipient_email))
except smtplib.SMTPException as e:
msg = '%r' % e
log.exception(msg)
raise MailerException(msg)
finally:
smtp_connection.quit()
def mail_recipient(recipient_name, recipient_email, subject,
body, headers={}):
site_title = config.get('ckan.site_title')
site_url = config.get('ckan.site_url')
return _mail_recipient(recipient_name, recipient_email,
site_title, site_url, subject, body,
headers=headers)
def mail_user(recipient, subject, body, headers={}):
if (recipient.email is None) or not len(recipient.email):
raise MailerException(_("No recipient email address available!"))
mail_recipient(recipient.display_name, recipient.email, subject,
body, headers=headers)
def get_reset_link_body(user):
extra_vars = {
'reset_link': get_reset_link(user),
'site_title': config.get('ckan.site_title'),
'site_url': config.get('ckan.site_url'),
'user_name': user.name,
}
# NOTE: This template is translated
return render_jinja2('emails/reset_password.txt', extra_vars)
def get_invite_body(user, group_dict=None, role=None):
if group_dict:
group_type = (_('organization') if group_dict['is_organization']
else _('group'))
extra_vars = {
'reset_link': get_reset_link(user),
'site_title': config.get('ckan.site_title'),
'site_url': config.get('ckan.site_url'),
'user_name': user.name,
}
if role:
extra_vars['role_name'] = h.roles_translated().get(role, _(role))
if group_dict:
extra_vars['group_type'] = group_type
extra_vars['group_title'] = group_dict.get('title')
# NOTE: This template is translated
return render_jinja2('emails/invite_user.txt', extra_vars)
def get_reset_link(user):
return h.url_for(controller='user',
action='perform_reset',
id=user.id,
key=user.reset_key,
qualified=True)
def send_reset_link(user):
create_reset_key(user)
body = get_reset_link_body(user)
extra_vars = {
'site_title': config.get('ckan.site_title')
}
subject = render_jinja2('emails/reset_password_subject.txt', extra_vars)
# Make sure we only use the first line
subject = subject.split('\n')[0]
mail_user(user, subject, body)
def send_invite(user, group_dict=None, role=None):
create_reset_key(user)
body = get_invite_body(user, group_dict, role)
extra_vars = {
'site_title': config.get('ckan.site_title')
}
subject = render_jinja2('emails/invite_user_subject.txt', extra_vars)
# Make sure we only use the first line
subject = subject.split('\n')[0]
mail_user(user, subject, body)
def create_reset_key(user):
user.reset_key = text_type(make_key())
model.repo.commit_and_remove()
def make_key():
return codecs.encode(os.urandom(16), 'hex')
def verify_reset_link(user, key):
if not key:
return False
if not user.reset_key or len(user.reset_key) < 5:
return False
return key.strip() == user.reset_key
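# Sketch of the reset-key round trip (illustrative, outside a real request):
#   key = make_key()              # 32 hex chars derived from 16 random bytes
#   user.reset_key = key          # what create_reset_key() persists
#   verify_reset_link(user, key)       # -> True
#   verify_reset_link(user, 'wrong')   # -> False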
| 31.322115
| 79
| 0.634229
|
972979266da3ad85222416925a27b391b57f151c
| 6,341
|
py
|
Python
|
tests/keys.py
|
sdss/opscore
|
dd4f2b2ad525fe3dfe3565463de2c079a7e1232e
|
[
"BSD-3-Clause"
] | null | null | null |
tests/keys.py
|
sdss/opscore
|
dd4f2b2ad525fe3dfe3565463de2c079a7e1232e
|
[
"BSD-3-Clause"
] | 1
|
2021-08-17T21:08:14.000Z
|
2021-08-17T21:08:14.000Z
|
tests/keys.py
|
sdss/opscore
|
dd4f2b2ad525fe3dfe3565463de2c079a7e1232e
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
"""Unit tests for opscore.protocols.keys
"""
# Created 18-Nov-2008 by David Kirkby (dkirkby@uci.edu)
import unittest
import opscore.protocols.keys as protoKeys
import opscore.protocols.messages as protoMess
import opscore.protocols.types as protoTypes
class KeysTest(unittest.TestCase):
def setUp(self):
self.k1 = protoMess.Keyword("key1")
self.k2 = protoMess.Keyword("key2", ["-1.2"])
self.k3 = protoMess.Keyword("key3", [0xDEAD, 0xBEEF])
self.k4 = protoMess.Keyword("key4", ["?"])
self.key1 = protoKeys.Key("Key1")
self.key2 = protoKeys.Key("Key2", protoTypes.Float())
self.key3 = protoKeys.Key("Key3", protoTypes.UInt() * 2)
self.key4 = protoKeys.Key("Key4", protoTypes.Float(invalid="?"))
def test00(self):
"Key validation passes"
self.assertTrue(self.key1.consume(self.k1))
self.assertTrue(self.key2.consume(self.k2))
self.assertTrue(self.key3.consume(self.k3))
self.assertTrue(self.key4.consume(self.k4))
self.assertTrue(len(self.k1.values) == 0)
self.assertTrue(self.k2.values[0] == -1.2)
self.assertTrue(self.k3.values[0] == 0xDEAD)
self.assertTrue(self.k3.values[1] == 0xBEEF)
self.assertTrue(self.k4.values[0] == None)
def test01(self):
"Key validation fails"
self.assertFalse(self.key1.consume(self.k2))
self.assertFalse(self.key1.consume(self.k3))
self.assertFalse(self.key2.consume(self.k1))
self.assertFalse(self.key2.consume(self.k3))
self.assertFalse(self.key3.consume(self.k1))
self.assertFalse(self.key3.consume(self.k2))
def test02(self):
"Keyword creation with a list of valid string values"
self.assertEqual(self.key1.create([]), self.k1)
self.assertEqual(self.key2.create(["-1.2"]), self.k2)
self.assertEqual(self.key3.create(["0xdead", "0xbeef"]), self.k3)
def test03(self):
"Keyword creation with varargs valid string values"
self.assertEqual(self.key1.create(), self.k1)
self.assertEqual(self.key2.create("-1.2"), self.k2)
self.assertEqual(self.key3.create("0xdead", "0xbeef"), self.k3)
def test04(self):
"Keyword creation with valid typed values"
self.assertEqual(self.key2.create(-1.2), self.k2)
self.assertEqual(self.key3.create(0xDEAD, 0xBEEF), self.k3)
self.assertEqual(self.key3.create("0xdead", 0xBEEF), self.k3)
self.assertEqual(self.key3.create([0xDEAD, "0xbeef"]), self.k3)
def test05(self):
"Keyword creation with wrong number of values"
self.assertRaises(protoKeys.KeysError, lambda: self.key1.create(-1.2))
self.assertRaises(protoKeys.KeysError, lambda: self.key2.create())
self.assertRaises(protoKeys.KeysError, lambda: self.key3.create("0xdead"))
def test06(self):
"Keyword creation with wrong value types"
self.assertRaises(protoKeys.KeysError, lambda: self.key2.create("abc"))
self.assertRaises(protoKeys.KeysError, lambda: self.key3.create(0xDEAD, "abc"))
self.assertRaises(
protoKeys.KeysError, lambda: self.key3.create("abc", "0xdead")
)
def test07(self):
"Read testing dictionary (disabled since testing actor has been deleted)"
pass
# kdict = protoKeys.KeysDictionary.load("testing")
# self.failUnless('unsigned' in kdict)
# self.failUnless('UnSigned' in kdict)
def test08(self):
"Generic compound value type without explicit wrapper"
msgKey = protoKeys.Key(
"msg",
protoTypes.CompoundValueType(
protoTypes.Enum("INFO", "WARN", "ERROR", name="code"),
protoTypes.String(name="text"),
),
)
msg = protoMess.Keyword("msg", ["INFO", "Hello, world"])
self.assertTrue(msgKey.consume(msg))
self.assertEqual(len(msg.values), 1)
self.assertTrue(isinstance(msg.values[0], tuple))
self.assertTrue(msg.values[0] == ("INFO", "Hello, world"))
def test09(self):
"Generic compound value type with explicit wrapper"
class Wrapped(object):
def __init__(self, code, text):
pass
msgKey = protoKeys.Key(
"msg",
protoTypes.CompoundValueType(
protoTypes.Enum("INFO", "WARN", "ERROR", name="code"),
protoTypes.String(name="text"),
wrapper=Wrapped,
),
)
msg = protoMess.Keyword("msg", ["INFO", "Hello, world"])
self.assertTrue(msgKey.consume(msg))
self.assertEqual(len(msg.values), 1)
self.assertTrue(isinstance(msg.values[0], Wrapped))
def test10(self):
"Generic compound value type with wrapping disabled"
msgKey = protoKeys.Key(
"msg",
protoTypes.CompoundValueType(
protoTypes.Enum("INFO", "WARN", "ERROR", name="code"),
protoTypes.String(name="text"),
),
)
msg = protoMess.Keyword("msg", ["INFO", "Hello, world"])
protoTypes.CompoundValueType.WrapEnable = False
self.assertTrue(msgKey.consume(msg))
protoTypes.CompoundValueType.WrapEnable = True
self.assertEqual(len(msg.values), 2)
self.assertTrue(msg.values[0] == "INFO")
self.assertTrue(msg.values[1] == "Hello, world")
def test11(self):
"PVT test"
pvtKey = protoKeys.Key("pvtMsg", protoTypes.PVT(), protoTypes.Float())
msg = protoMess.Keyword("pvtMsg", [1, 2, 3, 4])
self.assertTrue(pvtKey.consume(msg))
self.assertEqual(len(msg.values), 2)
import opscore.RO.PVT
self.assertTrue(isinstance(msg.values[0], opscore.RO.PVT.PVT))
self.assertEqual(repr(msg.values[0]), repr(opscore.RO.PVT.PVT(1, 2, 3)))
self.assertEqual(msg.values[1], 4)
def test12(self):
"Invalid value"
self.key4.consume(self.k4)
ival = self.k4.values[0]
self.assertEqual(ival, None)
self.assertEqual(ival, protoTypes.InvalidValue)
self.assertEqual(
{None: "-None-", protoTypes.InvalidValue: "-Invalid-"}[ival], "-Invalid-"
)
if __name__ == "__main__":
unittest.main()
| 39.141975
| 87
| 0.618672
|
2d394152278af834437b15cb3d766242f4c49900
| 6,827
|
py
|
Python
|
lorawan_gateway.py
|
leeatwill/single_chan_pkt_fwd
|
c1dd87cd5e8c035626599a36cf7f8286fcc5a25a
|
[
"BSD-3-Clause"
] | null | null | null |
lorawan_gateway.py
|
leeatwill/single_chan_pkt_fwd
|
c1dd87cd5e8c035626599a36cf7f8286fcc5a25a
|
[
"BSD-3-Clause"
] | null | null | null |
lorawan_gateway.py
|
leeatwill/single_chan_pkt_fwd
|
c1dd87cd5e8c035626599a36cf7f8286fcc5a25a
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Raspberry Pi Single Channel Gateway
Learn Guide: https://learn.adafruit.com/raspberry-pi-single-channel-lorawan-gateway
Author: Brent Rubell for Adafruit Industries
"""
# Import Python System Libraries
import json
import time
import subprocess
import uuid
# http.server provides the classes used by the balenaSenseHTTP handler below
from http.server import BaseHTTPRequestHandler, HTTPServer
# Import Adafruit Blinka Libraries
import busio
from digitalio import DigitalInOut, Direction, Pull
import board
# Import the SSD1306 module.
import adafruit_ssd1306
# Button A
btnA = DigitalInOut(board.D5)
btnA.direction = Direction.INPUT
btnA.pull = Pull.UP
# Button B
btnB = DigitalInOut(board.D6)
btnB.direction = Direction.INPUT
btnB.pull = Pull.UP
# Button C
btnC = DigitalInOut(board.D12)
btnC.direction = Direction.INPUT
btnC.pull = Pull.UP
# Create the I2C interface.
i2c = busio.I2C(board.SCL, board.SDA)
# 128x32 OLED Display
display = adafruit_ssd1306.SSD1306_I2C(128, 32, i2c)
# Clear the display.
display.fill(0)
display.show()
width = display.width
height = display.height
# Gateway id calculation (based off MAC address)
mac_addr = hex(uuid.getnode()).replace('0x', '')
print('Gateway ID: {0}:{1}:{2}:ff:ff:{3}:{4}:{5}'.format(mac_addr[0:2],mac_addr[2:4],
mac_addr[4:6],mac_addr[6:8],
mac_addr[8:10], mac_addr[10:12]))
# Parse `global_conf.json`
with open('global_conf.json', 'r') as config:
gateway_config = json.load(config)
# parse `SX127x_conf`
SX127x_conf = gateway_config['SX127x_conf']
gateway_freq = SX127x_conf['freq']/1000000
gateway_sf = SX127x_conf['spread_factor']
# parse `gateway_conf`
gateway_conf = gateway_config['gateway_conf']
gateway_name = gateway_conf['name']
# parse 'gateway_conf[servers]'
server_list = gateway_conf['servers']
ttn_server = server_list[0]
ttn_server_addr = ttn_server['address']
def stats():
"""Prints information about the Pi
to a display
"""
print('MODE: Pi Stats')
# Clear Display
display.fill(0)
    # Shell scripts for system monitoring, from here:
# https://unix.stackexchange.com/questions/119126/command-to-display-memory-usage-disk-usage-and-cpu-load
cmd = "hostname -I | cut -d\' \' -f1"
IP = subprocess.check_output(cmd, shell=True).decode("utf-8")
cmd = "top -bn1 | grep load | awk '{printf \"CPU Load: %.2f\", $(NF-2)}'"
CPU = subprocess.check_output(cmd, shell=True).decode("utf-8")
cmd = "free -m | awk 'NR==2{printf \"Mem: %s/%s MB %.2f%%\", $3,$2,$3*100/$2 }'"
MemUsage = subprocess.check_output(cmd, shell=True).decode("utf-8")
# write text to display
display.text("IP: "+str(IP), 0, 0, 1)
display.text(str(CPU), 0, 15, 1)
display.text(str(MemUsage), 0, 25, 1)
# display text for 3 seconds
display.show()
time.sleep(3)
def gateway():
"""Runs the single channel packet forwarder,
sends output to a display.
"""
print('MODE: Pi Gateway')
# Clear Display
display.fill(0)
display.text("Starting Gateway...", 15, 0, 1)
display.show()
print('starting gateway...')
try:
proc = subprocess.Popen("./single_chan_pkt_fwd",
bufsize=-1, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except FileNotFoundError:
print("To run the single packet forwarder, you'll need to run `sudo make all` first.")
return
display.fill(0)
display.text(gateway_name, 15, 0, 1)
display.show()
while True:
new_line = proc.stdout.readline().decode('utf-8')
print(new_line)
# grab new data on gateway status update
if new_line == "gateway status update\n":
display.fill(0)
gtwy_timestamp = proc.stdout.readline().decode('utf-8')
print('time:', gtwy_timestamp)
gtwy_status = proc.stdout.readline().decode('utf-8')
print(gtwy_status)
display.text(gateway_name, 15, 0, 1)
display.text(gtwy_status, 0, 15, 1)
display.text(gtwy_timestamp[11:23], 25, 25, 1)
elif new_line == "incoming packet...\n":
display.fill(0)
print('incoming pkt...')
# read incoming packet info
pkt_json = proc.stdout.readline().decode('utf-8')
# remove "gateway status update" text from TTN packet
pkt_json = pkt_json.replace("gateway status update", "")
print(pkt_json)
# parse packet
pkt_data = json.loads(pkt_json)
rxpk_data = pkt_data['rxpk']
pkt_data = rxpk_data.pop(0)
# display packet info
pkt_freq = pkt_data['freq']
pkt_size = pkt_data['size']
pkt_rssi = pkt_data['rssi']
pkt_tmst = pkt_data['tmst']
display.text('* PKT RX on {0}MHz'.format(pkt_freq), 0, 0, 1)
display.text('RSSI: {0}dBm, Sz: {1}b'.format(pkt_rssi, pkt_size), 0, 10, 1)
display.text('timestamp: {0}'.format(pkt_tmst), 0, 20, 1)
display.show()
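# Illustrative shape of the packet JSON parsed above (field values assumed,
# following the Semtech packet-forwarder rxpk format):
#   {"rxpk": [{"freq": 915.0, "size": 23, "rssi": -102, "tmst": 3512348611}]}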
def gateway_info():
"""Displays information about the LoRaWAN gateway.
"""
print('MODE: Gateway Info')
display.fill(0)
display.show()
print('Server: ', ttn_server_addr[0:9])
print('Freq: ', gateway_freq)
print('SF: ', gateway_sf)
print('Gateway Name:', gateway_name)
# write 3 lines of text
display.text(gateway_name, 15, 0, 1)
display.text('{0} MHz, SF{1}'.format(gateway_freq, gateway_sf), 15, 10, 1)
display.text('TTN: {0}'.format(ttn_server_addr[0:9]), 15, 20, 1)
display.show()
time.sleep(3)
while True:
# draw a box to clear the image
display.fill(0)
display.text('LoRaWAN Gateway EUI', 15, 0, 1)
display.text('{0}:{1}:{2}:ff'.format(mac_addr[0:2], mac_addr[2:4],
mac_addr[4:6]), 25, 15, 1)
display.text('ff:{0}:{1}:{2}'.format(mac_addr[6:8],mac_addr[8:10],
mac_addr[10:12]), 25, 25, 1)
# Radio Bonnet Buttons
if not btnA.value:
# show pi info
stats()
if not btnB.value:
# start the gateway
gateway()
if not btnC.value:
# show gateway configuration
gateway_info()
display.show()
time.sleep(.1)
class balenaSenseHTTP(BaseHTTPRequestHandler):
def _set_headers(self):
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
def do_GET(self):
self._set_headers()
measurements = balenasense.sample()
self.wfile.write(json.dumps(measurements[0]['fields']).encode('UTF-8'))
def do_HEAD(self):
self._set_headers()
balenasense = gateway()
while True:
server_address = ('', 80)
httpd = HTTPServer(server_address, balenaSenseHTTP)
print('Sensor HTTP server running')
httpd.serve_forever()
| 33.630542
| 109
| 0.620917
|
af1a7f03a0e9c13faaae11b8e06650b3592191f6
| 1,525
|
py
|
Python
|
GeneratorInterface/PomwigInterface/test/analyzeSDDijets_cfg.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
GeneratorInterface/PomwigInterface/test/analyzeSDDijets_cfg.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
GeneratorInterface/PomwigInterface/test/analyzeSDDijets_cfg.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
import FWCore.ParameterSet.Config as cms
process = cms.Process('Analysis')
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(-1)
)
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring("file:POMWIG_SingleDiffractiveDijetsPlus_8TeV_Pt_30_cff_py_GEN.root")
)
#process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi')
#process.load('PhysicsTools.HepMCCandAlgos.genParticles_cfi')
#process.load('RecoJets.Configuration.GenJetParticles_cff')
#process.load('RecoJets.Configuration.RecoGenJets_cff')
#process.genParticles.abortOnUnknownPDGCode = False
process.genParticlesVisible = cms.EDFilter("GenJetParticleRefSelector",
includeList = cms.vstring(),
src = cms.InputTag("genParticles"),
stableOnly = cms.bool(True),
verbose = cms.untracked.bool(True),
excludeList = cms.vstring('nu_e',
'nu_mu',
'nu_tau',
'mu-',
'~chi_10',
'~nu_eR',
'~nu_muR',
'~nu_tauR',
'Graviton',
'~Gravitino',
'nu_Re',
'nu_Rmu',
'nu_Rtau',
'nu*_e0',
'Graviton*')
)
process.SDDijets = cms.EDAnalyzer("SDDijetsAnalyzer",
GenParticleTag = cms.InputTag("genParticles"),
GenJetTag = cms.InputTag("ak5GenJets"),
EBeam = cms.double(4000.0),
debug = cms.untracked.bool(True)
)
process.add_(cms.Service("TFileService",
fileName = cms.string("analysisSDDijets_histos.root")
)
)
process.analysis = cms.Path(process.genParticlesVisible*process.SDDijets)
| 27.727273
| 107
| 0.685246
|
fcb2317ef6c4e3589df24a8f8d41e626d7256eb6
| 4,540
|
py
|
Python
|
monai/handlers/mean_dice.py
|
Irme/MONAI
|
dc4bf661831b14f4231cb325cc1b15d38c1e406c
|
[
"Apache-2.0"
] | 3
|
2020-10-12T02:13:27.000Z
|
2021-04-14T11:46:21.000Z
|
monai/handlers/mean_dice.py
|
JZK00/MONAI
|
49e693c4e7df83dc1f8ab87349373de9263188a9
|
[
"Apache-2.0"
] | 1
|
2020-09-17T12:41:51.000Z
|
2020-09-29T15:20:37.000Z
|
monai/handlers/mean_dice.py
|
JZK00/MONAI
|
49e693c4e7df83dc1f8ab87349373de9263188a9
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Callable, Optional, Sequence
import torch
from monai.metrics import DiceMetric
from monai.utils import MetricReduction, exact_version, optional_import
NotComputableError, _ = optional_import("ignite.exceptions", "0.4.2", exact_version, "NotComputableError")
Metric, _ = optional_import("ignite.metrics", "0.4.2", exact_version, "Metric")
reinit__is_reduced, _ = optional_import("ignite.metrics.metric", "0.4.2", exact_version, "reinit__is_reduced")
sync_all_reduce, _ = optional_import("ignite.metrics.metric", "0.4.2", exact_version, "sync_all_reduce")
class MeanDice(Metric): # type: ignore[valid-type, misc] # due to optional_import
"""
    Computes the Dice score metric from a full-size Tensor and collects the average over batches, class channels, and iterations.
"""
def __init__(
self,
include_background: bool = True,
to_onehot_y: bool = False,
mutually_exclusive: bool = False,
sigmoid: bool = False,
other_act: Optional[Callable] = None,
logit_thresh: float = 0.5,
output_transform: Callable = lambda x: x,
device: Optional[torch.device] = None,
) -> None:
"""
Args:
include_background: whether to include dice computation on the first channel of the predicted output.
Defaults to True.
to_onehot_y: whether to convert the output prediction into the one-hot format. Defaults to False.
mutually_exclusive: if True, the output prediction will be converted into a binary matrix using
a combination of argmax and to_onehot. Defaults to False.
sigmoid: whether to add sigmoid function to the output prediction before computing Dice.
Defaults to False.
            other_act: callable function to replace `sigmoid` as activation layer if needed. Defaults to ``None``;
                for example: `other_act = torch.tanh`.
            logit_thresh: the threshold value used to round values to 0.0 or 1.0. Defaults to 0.5.
output_transform: transform the ignite.engine.state.output into [y_pred, y] pair.
device: device specification in case of distributed computation usage.
See also:
:py:meth:`monai.metrics.meandice.compute_meandice`
"""
super().__init__(output_transform, device=device)
self.dice = DiceMetric(
include_background=include_background,
to_onehot_y=to_onehot_y,
mutually_exclusive=mutually_exclusive,
sigmoid=sigmoid,
other_act=other_act,
logit_thresh=logit_thresh,
reduction=MetricReduction.MEAN,
)
self._sum = 0.0
self._num_examples = 0
@reinit__is_reduced
def reset(self) -> None:
self._sum = 0.0
self._num_examples = 0
@reinit__is_reduced
def update(self, output: Sequence[torch.Tensor]) -> None:
"""
Args:
output: sequence with contents [y_pred, y].
Raises:
ValueError: When ``output`` length is not 2. MeanDice metric can only support y_pred and y.
"""
if len(output) != 2:
raise ValueError(f"output must have length 2, got {len(output)}.")
y_pred, y = output
score = self.dice(y_pred, y)
assert self.dice.not_nans is not None
not_nans = int(self.dice.not_nans.item())
# add all items in current batch
self._sum += score.item() * not_nans
self._num_examples += not_nans
@sync_all_reduce("_sum", "_num_examples")
def compute(self) -> float:
"""
Raises:
NotComputableError: When ``compute`` is called before an ``update`` occurs.
"""
if self._num_examples == 0:
raise NotComputableError("MeanDice must have at least one example before it can be computed.")
return self._sum / self._num_examples
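# Accumulation logic in isolation (values illustrative): after two updates
# whose batch-mean Dice scores are 0.80 (3 non-NaN items) and 0.60 (1 item),
#   _sum          = 0.80 * 3 + 0.60 * 1 = 3.0
#   _num_examples = 4
#   compute()     = 3.0 / 4 = 0.75
# i.e. a mean weighted by the number of valid (non-NaN) items per batch.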
| 41.651376
| 114
| 0.661454
|
c0326266597fc02bab4cf5a938f46ce234430831
| 185
|
py
|
Python
|
app/celerydemo/tasks.py
|
rudra012/dj_celery_docker
|
37ba5f11eda472a445540605c33037e53f09a15f
|
[
"MIT"
] | null | null | null |
app/celerydemo/tasks.py
|
rudra012/dj_celery_docker
|
37ba5f11eda472a445540605c33037e53f09a15f
|
[
"MIT"
] | null | null | null |
app/celerydemo/tasks.py
|
rudra012/dj_celery_docker
|
37ba5f11eda472a445540605c33037e53f09a15f
|
[
"MIT"
] | null | null | null |
from celery import shared_task
from .models import TaskLog
@shared_task
def logging_task():
print('Logging task invoked...........')
TaskLog.objects.create(task_name='test')
| 18.5
| 44
| 0.713514
|
f105465aad29e94a9eddc88ab93324e35402c7f9
| 780
|
py
|
Python
|
src/gatekeeper/gatekeeper/util/validation.py
|
CN-UPB/Cloud-NFV-Orchestration
|
28a6852f529ac73fe28f4448597f455b2d2fe552
|
[
"Apache-2.0"
] | 10
|
2019-01-09T06:32:58.000Z
|
2021-11-16T11:36:22.000Z
|
src/gatekeeper/gatekeeper/util/validation.py
|
CN-UPB/Cloud-NFV-Orchestration
|
28a6852f529ac73fe28f4448597f455b2d2fe552
|
[
"Apache-2.0"
] | 14
|
2019-11-13T06:51:51.000Z
|
2021-12-09T02:01:29.000Z
|
src/gatekeeper/gatekeeper/util/validation.py
|
CN-UPB/Cloud-NFV-Orchestration
|
28a6852f529ac73fe28f4448597f455b2d2fe552
|
[
"Apache-2.0"
] | 7
|
2019-02-06T05:46:56.000Z
|
2021-08-21T13:56:07.000Z
|
"""
Functions to validate data against Pishahang JSON schemas
"""
import json
from pathlib import Path
import jsonschema
from gatekeeper.models.descriptors import DescriptorType
DESCRIPTOR_SCHEMA_DIR = Path(__file__).parents[3] / "schemas/bundled/descriptors"
schemaMap = {}
for descriptorType, filename in [
(DescriptorType.SERVICE, "service/service"),
(DescriptorType.OPENSTACK, "functions/openstack"),
(DescriptorType.KUBERNETES, "functions/kubernetes"),
(DescriptorType.AWS, "functions/aws"),
]:
with (DESCRIPTOR_SCHEMA_DIR / filename).with_suffix(".json").open() as schema:
schemaMap[descriptorType.value] = json.load(schema)
def validateDescriptor(type: str, descriptor: dict):
return jsonschema.validate(descriptor, schemaMap[type])
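# Example usage (descriptor contents are illustrative):
#   descriptor = {"name": "my-service"}  # would normally be a full descriptor
#   validateDescriptor(DescriptorType.SERVICE.value, descriptor)
# raises jsonschema.ValidationError when the descriptor does not match the
# bundled schema for its type, and returns None otherwise.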
| 27.857143
| 82
| 0.755128
|
69771187f2374577a31d8f8dac915400578f9952
| 1,088
|
py
|
Python
|
hood/migrations/0004_post.py
|
D-Kamunya/watch_hood
|
26044e8b38cb3f89b5474fa1adc2db7f463555fa
|
[
"Unlicense"
] | null | null | null |
hood/migrations/0004_post.py
|
D-Kamunya/watch_hood
|
26044e8b38cb3f89b5474fa1adc2db7f463555fa
|
[
"Unlicense"
] | null | null | null |
hood/migrations/0004_post.py
|
D-Kamunya/watch_hood
|
26044e8b38cb3f89b5474fa1adc2db7f463555fa
|
[
"Unlicense"
] | null | null | null |
# Generated by Django 3.1.2 on 2020-10-31 19:52
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('hood', '0003_business'),
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('post_title', models.CharField(max_length=150)),
('post_text', models.TextField()),
('upload_date', models.DateTimeField(auto_now_add=True)),
('neighbourhood', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hood.neighbourhood')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['upload_date'],
},
),
]
| 35.096774
| 123
| 0.609375
|
db1b4a07f43a24ee45d3b8649bab6e550d83ea1a
| 568
|
py
|
Python
|
leapy/dask/__init__.py
|
nonabelian/leapy
|
152152eed87572983dd61b27a4a1726b5cb2e615
|
[
"BSD-3-Clause"
] | 1
|
2019-05-01T01:59:03.000Z
|
2019-05-01T01:59:03.000Z
|
leapy/dask/__init__.py
|
nonabelian/leapy
|
152152eed87572983dd61b27a4a1726b5cb2e615
|
[
"BSD-3-Clause"
] | null | null | null |
leapy/dask/__init__.py
|
nonabelian/leapy
|
152152eed87572983dd61b27a4a1726b5cb2e615
|
[
"BSD-3-Clause"
] | null | null | null |
from dask_ml.linear_model import LogisticRegression
from .transformers import OneHotEncoder
from .transformers import Selector
from .transformers.export import OneHotEncoderExporter
from .transformers.export import TrivialExporter
setattr(LogisticRegression, 'to_runtime', TrivialExporter.to_runtime)
# setattr(SGDClassifier, 'to_runtime', TrivialExporter.to_runtime)
# setattr(Incremental, 'to_runtime', TrivialExporter.to_runtime)
setattr(OneHotEncoder, 'to_runtime', OneHotEncoderExporter.to_runtime)
setattr(Selector, 'to_runtime', TrivialExporter.to_runtime)
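# Added note (not part of the original module): after this monkey-patching,
# each listed estimator/transformer carries a `to_runtime` hook. Assuming the
# exporters follow the usual pattern of converting a fitted object into a
# lightweight runtime equivalent, usage would look roughly like:
#
#     clf = LogisticRegression().fit(X, y)
#     runtime_clf = clf.to_runtime()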
| 37.866667
| 70
| 0.850352
|
0863659995ffea46242e0e990769f9c6b99abbdd
| 988
|
py
|
Python
|
test/test_lz4_encoder.py
|
mmontagna/generic-encoders
|
3344a6dfb6173feb0f8ac6102ac01d5a4425c99c
|
[
"MIT"
] | 5
|
2018-07-06T05:30:51.000Z
|
2021-03-04T05:09:35.000Z
|
test/test_lz4_encoder.py
|
mmontagna/generic-encoders
|
3344a6dfb6173feb0f8ac6102ac01d5a4425c99c
|
[
"MIT"
] | 4
|
2018-07-03T15:09:33.000Z
|
2018-08-11T04:06:01.000Z
|
test/test_lz4_encoder.py
|
mmontagna/generic-encoders
|
3344a6dfb6173feb0f8ac6102ac01d5a4425c99c
|
[
"MIT"
] | null | null | null |
import unittest
from generic_encoders import lz4_encoder
class TestLz4Encoder(unittest.TestCase):
def test_lz4_encoders_bytes(self):
string = b"some string 123"
self.assertEqual(string,
lz4_encoder.decode(
lz4_encoder.encode(string)
))
def test_throws_exception_when_encode_passed_bogus_type(self):
string = 123
        with self.assertRaises(TypeError):
lz4_encoder.encode(string)
def test_throws_exception_when_decode_passed_bogus_type(self):
string = 123
        with self.assertRaises(TypeError):
lz4_encoder.decode(string)
def test_lz4_decode(self):
string = b"asdaaaaaaaaaaaaaaaaasdasd\n"
self.assertEqual(string,
lz4_encoder.decode(
b'\x04"M\x18d@\xa7\x0e\x00\x00\x00Lasda\x01\x00`sdasd\n\x00\x00\x00\x00+6\x98\xaf'
))
if __name__ == '__main__':
unittest.main()
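# Added usage sketch (not part of the original test module), restating the
# round-trip contract the tests above exercise:
#
#     compressed = lz4_encoder.encode(b"payload")        # bytes in, LZ4 frame out
#     assert lz4_encoder.decode(compressed) == b"payload"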
| 29.939394
| 98
| 0.652834
|
815e8b31593996feb91b690f38dc075513948bd8
| 24,467
|
py
|
Python
|
src/nufeb_tools/utils.py
|
Jsakkos/nufeb_tools
|
e7143102a7ceab5505ebcf02128b1fe4f3535320
|
[
"MIT"
] | null | null | null |
src/nufeb_tools/utils.py
|
Jsakkos/nufeb_tools
|
e7143102a7ceab5505ebcf02128b1fe4f3535320
|
[
"MIT"
] | null | null | null |
src/nufeb_tools/utils.py
|
Jsakkos/nufeb_tools
|
e7143102a7ceab5505ebcf02128b1fe4f3535320
|
[
"MIT"
] | null | null | null |
import os
import h5py
from pathlib import Path
import pandas as pd
import numpy as np
import json
import subprocess
import sys
import argparse
import pickle
from urllib.parse import urlparse
from urllib.request import urlretrieve
import tarfile
from scipy.spatial.distance import pdist,squareform
from scipy.spatial import KDTree
from tqdm import tqdm
import cv2
from nufeb_tools import __version__
urls= ['https://github.com/Jsakkos/nufeb-tools/raw/main/data/runs.tar']
class get_data:
"""Collect results for analysis.
NUFEB simulation data class to collect results for analysis
Attributes:
test (bool): Set `test = True` to get example data from the Github repository
directory (str): Path to the directory containing NUFEB simulation data. If `directory = None`, get_data will look for a DataFed collection
        id (str): DataFed record ID, e.g., `"c/34558900"`
timestep (int): Length of simulation timestep in seconds
SucRatio (int): Relative cyanobacterial sucrose secretion level, 0-100
timepoints (List(str)): List of timepoints in the simulation
dims (List(str)): Size of the simulation boundaries in micrometers
numsteps (int): Number of timepoints
biomass (pandas.DataFrame): Pandas Dataframe containing the biomass vs time data from biomass.csv
ntypes (pandas.DataFrame): Pandas Dataframe containing the cell number vs time data from ntypes.csv
avg_con (pandas.DataFrame): Pandas Dataframe containing the average nutrient concentrations vs time data from avg_concentration.csv
positions (pandas.DataFrame): Pandas Dataframe containing the single cell biomass over time of all cell ids present at the timepoint
"""
def __init__(self,directory=None,id=None,test=None,timestep=10):
self.timestep=timestep
if test:
self.directory = str((Path.home()) / '.nufeb_tools' / 'data' / 'Run_45_56_45_1_2021-12-03_671906')
if not os.path.isdir(self.directory):
download_test_data()
self.get_local_data()
        elif directory and id:
            print('Define either a local directory or DataFed Collection ID, not both')
        elif directory:
            self.directory = directory
            self.get_local_data()
        elif id:
            self.id = id
            self.get_datafed_data()
        else:
            print('Missing local directory or DataFed Collection ID')
self.dims += [self.dims.pop(0)] # move Z dimension to last: z,x,y to x,y,z
self.numsteps = len(self.timepoints)
self.Timesteps = self.positions.Timestep.unique()
self.calc_biomass()
#self.get_mothers()
def get_local_data(self):
"""
Collect NUFEB simulation data from a local directory.
"""
try:
h5 = h5py.File(os.path.join(self.directory,'trajectory.h5'),mode='r')
self.timepoints = [key for key in h5['concentration']['co2'].keys()]
self.timepoints.sort(key=int)
self.dims = list(h5['concentration']['co2']['0'].shape)
self.nutrients = list(h5['concentration'].keys())
self.collect_positions(h5)
self.get_nutrient_grid(h5)
h5.close()
        except Exception:
print('Missing HDF5 file')
self.biomass = pd.read_csv(os.path.join(
self.directory,'Results','biomass.csv'),
usecols=[0,1,2],delimiter='\t')
self.ntypes = pd.read_csv(os.path.join(
self.directory,'Results','ntypes.csv'),
usecols=[0,1,2],delimiter='\t')
self.avg_con = pd.read_csv(os.path.join(
self.directory,'Results','avg_concentration.csv'),
usecols=[0,2,3,4],
delimiter='\t',
names=['Time','O2','Sucrose','CO2'],
skiprows=1)
f = open(os.path.join(self.directory,'metadata.json'),'r')
self.metadata = json.load(f)
f.close()
if 'IPTG' in self.metadata:
self.IPTG = self.metadata['IPTG']
self.sucRatio = self.metadata['IPTG']
else:
self.IPTG = self.metadata['SucRatio']
# TODO replace sucRatio with IPTG
self.sucRatio = self.metadata['SucRatio']
self.convert_units_avg_con()
self.convert_units_biomass()
def convert_units_avg_con(self):
"""Convert the object attribute avg_con, which contains the average nutrient concentration, units to hours and mM.
"""
self.avg_con.index = self.avg_con.Time/60/60*self.timestep
self.avg_con.index.name='Hours'
self.avg_con.drop('Time',inplace=True,axis=1)
SucroseMW = 342.3
O2MW = 32
CO2MW = 44.01
self.avg_con.O2 = self.avg_con.O2/O2MW*1e3
self.avg_con.Sucrose = self.avg_con.Sucrose/SucroseMW*1e3
self.avg_con.loc[:,'CO2'] = self.avg_con.loc[:,'CO2']/CO2MW*1e3
def convert_units_biomass(self):
"""Convert the object attribute biomass units to hours and femtograms.
"""
self.biomass.index = self.biomass.step/60/60*self.timestep
self.biomass.index.name='Hours'
self.biomass.iloc[:,1:]=self.biomass.iloc[:,1:]*1e18
def get_datafed_data(self,dir=None,orig_fname=True,wait=True):
"""
Collect NUFEB simulation data from a DataFed collection
Args:
dir (str):
Directory to download the DataFed collection to. Defaults to user/.nufeb/data/collection
orig_fname (bool):
Use original filenames
wait (bool):
Wait for the download to complete before moving on
"""
from datafed.CommandLib import API
df_api = API()
df_api.setContext('p/eng107')
        dv_resp = df_api.collectionView(self.id)
        if not dir:
            self.directory = str((Path.home()) / '.nufeb_tools' / 'data' / dv_resp[0].coll[0].title)
        else:
            self.directory = dir
get_resp = df_api.dataGet(self.id,
path=self.directory,
orig_fname=orig_fname,
wait=wait,
)
try:
h5 = h5py.File(os.path.join(self.directory,'trajectory.h5'))
self.timepoints = [key for key in h5['concentration']['co2'].keys()]
self.timepoints.sort(key=int)
self.dims = list(h5['concentration']['co2']['0'].shape)
self.nutrients = list(h5['concentration'].keys())
self.collect_positions(h5)
self.get_nutrient_grid(h5)
h5.close()
        except Exception:
print('Missing HDF5 file')
self.metadata = json.loads(dv_resp[0].data[0].metadata)
self.biomass = pd.read_csv(os.path.join(
self.directory,'biomass.csv'),
usecols=[0,1,2],delimiter='\t')
self.ntypes = pd.read_csv(os.path.join(
self.directory,'ntypes.csv'))
self.avg_con = pd.read_csv(os.path.join(
self.directory,'avg_concentration.csv'),
usecols=[0,2,3,4],
delimiter='\t',
names=['Time','O2','Sucrose','CO2'],
skiprows=1)
self.convert_units_avg_con()
self.convert_units_biomass()
#print(dv_resp)
def calc_biomass(self):
df = self.positions
df['biomass'] = 0
df.loc[df.type==1,'biomass'] = 4/3*np.pi*df['radius']**3*self.metadata['cyano']['Density']*1e18
df.loc[df.type==2,'biomass'] = 4/3*np.pi*df['radius']**3*self.metadata['ecw']['Density']*1e18
        df['time'] = df['Timestep'].transform(lambda j: j * self.timestep / 3600)
def collect_positions(self,h5):
"""
Extract the x, y, z position of each cell during the simulation.
Args:
timepoint (int):
The simulation timestep to get the position data from.
Returns:
pandas.DataFrame:
Dataframe containing Timestep, ID, type, radius, x, y, z columns
"""
dfs = list()
for t in self.timepoints:
dfs.append(
pd.concat([pd.Series(np.ones(h5['x'][str(t)].len())*int(t),dtype=int,name='Timestep'),
pd.Series(h5['id'][str(t)],name='ID'),
pd.Series(h5['type'][str(t)],name='type'),
pd.Series(h5['radius'][str(t)],name='radius'),
pd.Series(h5['x'][str(t)],name='x'),
pd.Series(h5['y'][str(t)],name='y'),
pd.Series(h5['z'][str(t)],name='z')],axis=1)
)
temp = pd.concat(dfs,ignore_index=True)
idx = temp[temp.type==0].index
self.positions = temp.drop(idx).reset_index(drop=True)
def get_neighbor_distance(self,id,timepoint):
"""
Get the nearest neighbor cell distances
Args:
id (int):
The ID of the reference cell
timepoint (int):
The timepoint to check the neighbor distances from
Returns:
pandas.DataFrame:
Dataframe containing ID, type, Distance
"""
# TODO Speed up or parallelize this computation
df = self.positions[self.positions.Timestep==timepoint]
temp = (df.loc[df.ID ==id,['x','y','z']].squeeze() - df.loc[df.ID !=id,['x','y','z']])**2
dist = pd.Series(np.sqrt(temp.x + temp.y + temp.z),name='Distance')
return pd.concat([df.loc[df.ID !=id,['ID','type']],dist],axis=1).reset_index(drop=True)
def get_neighbors(self,timestep):
"""
Get the nearest neighbor cell distances
Args:
timestep (int):
The timepoint to check the neighbor distances from
Returns:
pd.DataFrame:
Pandas dataframe containing pairwise neighbor distances
"""
df = self.positions
df2 = df[df.Timestep == timestep].set_index(['ID'])
df2.sort_index(inplace=True)
# distances =pdist(df2[['x','y','z']])
pairwise = pd.DataFrame(
squareform(pdist(df2[['x','y','z']])),
columns = df2.index,
index = df2.index
)
pairwise[pairwise ==0] = np.nan
return pairwise
def get_mothers__old(self):
"""
Assign mother cells based on initial cells in the simulation.
Returns:
pandas.DataFrame:
Dataframe containing ID, type, position, radius, and mother_cell
"""
df = self.positions
df['mother_cell'] = -1
for ID in df.loc[df.Timestep==0,'ID'].unique():
idx = df[df['ID'] ==ID].index
df.loc[idx,'mother_cell'] = ID
for time in tqdm(sorted(df[df.Timestep!=0].Timestep.unique()),desc='Assigning ancestry'):
for type_ in df.type.unique():
ancestors = df[(df.type==type_) & (df.Timestep==time) & (df.mother_cell != -1)]
arr1 = ancestors[['x','y','z']].to_numpy()
tree1 = KDTree(arr1)
motherless = df[(df.type==type_) & (df.Timestep==time) & (df.mother_cell == -1)]
if not motherless.empty:
d, i = tree1.query(motherless[['x','y','z']].to_numpy(), k=1)
idx1 =motherless.index
a = ancestors.iloc[i,:].mother_cell.values
df.loc[idx1,'mother_cell']=a
self.colonies = df
def get_mothers(self):
"""
Assign mother cells based on initial cells in the simulation.
Returns:
pandas.DataFrame:
Dataframe containing Timestep, ID, type, position, radius, biomass, total biomass, and mother_cell
"""
df = self.positions.copy()
df['mother_cell'] = -1
df.loc[df.Timestep==0,'mother_cell'] = df.loc[df.Timestep==0,'ID']
ancestry_df = df.loc[df.Timestep==0,['ID','mother_cell']]
type_=1
for time in tqdm(sorted(df[df.Timestep!=0].Timestep.unique()),desc='Assigning ancestry'):
for type_ in df.type.unique():
temp = df.loc[(df.type==type_) & (df.Timestep==time),['ID','x','y','z']]
ancestors = temp.join(ancestry_df.set_index(['ID']),on='ID',how='inner', lsuffix='_left', rsuffix='_right')
arr = ancestors[['x','y','z']].to_numpy()
tree= KDTree(arr)
                motherless = pd.merge(temp, ancestors, on='ID', how='left', indicator=True).query('_merge == "left_only"').drop('_merge', axis=1).drop('x_y', axis=1).iloc[:, :4]
if not motherless.empty:
d, i = tree.query(motherless[['x_x','y_x','z_x']].to_numpy(), k=1)
motherless.loc[:,'mother_cell'] = ancestors.iloc[i,4].to_numpy()
ancestry_df = pd.concat([ancestry_df,motherless.loc[:,['ID','mother_cell']]],ignore_index=True)
        df = df.join(ancestry_df.set_index(['ID']), on='ID', how='right', lsuffix='_left', rsuffix='').drop('mother_cell_left', axis=1)
df['total_biomass'] = df.groupby(['mother_cell','Timestep']).cumsum()['biomass']
self.colonies = df
def count_colony_area(self,timestep):
"""
Count the 2d area in pixel dimensions of each colony at a given timestep.
Args:
timestep (int):
Timestep to count
"""
if not hasattr(self,'colonies'):
self.get_mothers()
df = self.colonies
else:
df = self.colonies
tp = df[df.Timestep == timestep]
img_size = 2000
bk = 255 * np.ones(shape=[img_size, img_size, 3], dtype=np.uint8)
circles = [cv2.circle(bk,center = (round(x/self.metadata['Dimensions'][0]*img_size),
round(y/self.metadata['Dimensions'][1]*img_size)),radius = round(radius/self.metadata['Dimensions'][1]*img_size),
color = (cell,0,0),thickness = -1) for x,y, radius,cell in zip(tp.x,tp.y,tp.radius,tp.mother_cell)]
cols, counts = np.unique(bk[:,:,0],return_counts=1)
for colony,area in zip(cols[:-1],counts[:-1]):
idx = df[(df.mother_cell==int(colony)) & (df.Timestep==timestep)].index
self.colonies.loc[idx,'Colony Area'] = area
def get_colony_areas(self):
"""Count colony areas for all timesteps
"""
if not hasattr(self,'colonies'):
self.get_mothers()
df = self.colonies
else:
df = self.colonies
for time in tqdm(df.Timestep.unique(),desc='Counting colony areas'):
self.count_colony_area(time)
def get_nutrient_grid(self,h5):
# TODO make nutrient grid function independent of h5 file
keys = list(h5['concentration'].keys())
timepoints = [k for k in h5['concentration'][keys[0]].keys()]
timepoints.sort(key=int)
stacks = list()
for key in keys:
dfs = list()
for time in timepoints:
dfs.append(h5['concentration'][key][time])
stacks.append(np.stack(dfs))
grid = np.stack(stacks,axis=1)
self.grid = grid
return
def get_local_con(self,timestep,cellID):
"""
Get the local nutrient concentration of a cell
Args:
timestep (int):
The timestep at which to check the concentration
cellID (int):
The cell identification number
Returns:
Nutrient Concentration (float):
The concentration of the specified nutrient within the cell's grid
"""
cell_locs = self.positions
grid = [np.linspace(0,self.metadata['Dimensions'][x],self.dims[x]) for x in range(3)]
grid_loc = [get_grid_idx(grid[i],cell_locs[cell_locs.ID ==cellID][d].values[0]) for i,d in enumerate(['x','y','z'])]
return self.grid[timestep,:,grid_loc[2],grid_loc[0],grid_loc[1]]
def get_fitness(self,timestep,cellID):
"""
Get the fitness of an individual cell based on the relative Monod growth rate at a given timestep
Args:
timestep (int):
The timestep at which to check the concentration
cellID (int):
The cell identification number
Returns:
float:
The Monod growth rate (1/s)
"""
# TODO Speed up or parallelize this computation
df = self.positions
        cell = df[(df.Timestep == timestep) & (df.ID == cellID)]
        if cell.empty:
            print('Timestep or cell ID not found')
            return
        cell_type = cell.type.values[0]
concentrations = self.get_local_con(list(df.Timestep.unique()).index(timestep),cellID)
if cell_type == 1:
metadata = self.metadata['cyano']
light = concentrations[self.nutrients.index('sub')]
co2 = concentrations[self.nutrients.index('co2')]
fitness = metadata['GrowthRate'] * (light / (metadata['K_s']['sub'] + light)) * (co2 / (metadata['K_s']['co2'] + co2))
return fitness
elif cell_type == 2:
metadata = self.metadata['ecw']
suc = concentrations[self.nutrients.index('suc')]
o2 = concentrations[self.nutrients.index('o2')]
maintenance = metadata['GrowthParams']['Maintenance'] * (o2 / (metadata['K_s']['o2'] + o2))
decay = metadata['GrowthParams']['Decay']
fitness = metadata['GrowthRate'] * (suc / (metadata['K_s']['suc'] + suc)) * (o2 / (metadata['K_s']['o2'] + o2))
return fitness - maintenance - decay
def collect_fitness(self):
df = self.positions
fitness = pd.DataFrame(columns=['Time','ID','Fitness'])
for time in tqdm(self.Timesteps):
for cell in df[(df.Timestep==time)].ID:
fitness = fitness.append(pd.DataFrame([[time,cell,self.get_fitness(time,cell)]],columns=['Time','ID','Fitness']),ignore_index=True)
self.fitness=fitness
def get_grid_idx(array,value):
"""
Find the nutrient grid index value. Taken from https://stackoverflow.com/questions/2566412/find-nearest-value-in-numpy-array.
Args:
array (numpy.array):
1D Array containing the grid positions
value (float):
Cell location to map to the grid
Returns:
index (int):
Grid index
"""
    n = len(array)
    jl = 0  # initialize the lower
    ju = n - 1  # and upper limits
    while (ju - jl > 1):  # while we are not yet done,
        jm = (ju + jl) >> 1  # compute a midpoint with a bitshift
        if (value >= array[jm]):
            jl = jm  # and replace either the lower limit
        else:
            ju = jm  # or the upper limit, as appropriate;
        # repeat until the test condition is satisfied
    if (value == array[0]):  # edge cases at bottom
        return 0
    elif (value == array[n - 1]):  # and top
        return n - 1
    else:
        return jl
def download_test_data(urls=urls):
"""
Get an example dataset from the Github repo. Downloads to "home/.nufeb_tools/data"
Args:
urls (List(str))
"""
# nufeb_tools directory
cp_dir = Path.home().joinpath('.nufeb_tools')
cp_dir.mkdir(exist_ok=True)
data_dir = cp_dir.joinpath('data')
data_dir.mkdir(exist_ok=True)
# TODO Add progress bar
for url in urls:
parts = urlparse(url)
filename = os.path.basename(parts.path)
cached_file = os.path.join(data_dir, filename)
if not os.path.exists(cached_file):
local_filename, headers = urlretrieve(url, cached_file)
tar = tarfile.open(local_filename,'r')
tar.extractall(path=data_dir)
tar.close()
Path(local_filename).unlink()
def upload_datafed(file, title, collection_id,metadata_file):
"""
Create a data collection to hold NUFEB data in DataFed
Args:
file (str):
Path of file to upload
title (str):
Name to use for file on DataFed
collection_id (str):
The identifier of the collection to store the file
metadata_file (str):
Path of the metadata file to append to the data file
"""
filename = file
file_title= title
global_coll_id = collection_id
from datafed.CommandLib import API
df_api = API()
if metadata_file != '':
pkl_file = metadata_file
with open(pkl_file, 'rb') as f:
metadata = pickle.load(f)
rec_msg = df_api.dataCreate(title = file_title,
alias = '',
metadata=json.dumps(metadata),
parent_id=global_coll_id,
)
rec_id = rec_msg[0].data[0].id
        # Use the path and name of the file you wish to move from CADES to DataFed
pput_msg = df_api.dataPut(rec_id, filename, wait=False)
#_logger.info(pput_msg)
else:
#_logger.debug('No metadata file found')
sys.exit(1)
def create_datafed_collection(n_cyanos, n_ecw, SucPct,dims):
"""
Create a data collection to hold NUFEB data in DataFed
Args:
n_cyanos (int):
Number of initial cyanobacteria
n_ecw (int):
Number of initial E. coli
SucPct (int):
Percentage of sucrose secretion activation
dims (List(float)):
x, y, z simulation boundaries
"""
try:
from datafed.CommandLib import API
df_api = API()
df_api.setContext('p/eng107')
collectionName = f'NUFEB_{n_cyanos}_{n_ecw}_{SucPct}_{dims[0]}_{dims[1]}_{dims[2]}'
parent_collection = df_api.getAuthUser().split('/')[1]
coll_msg = df_api.collectionCreate(collectionName,
parent_id=parent_collection)
global_coll_id = coll_msg[0].coll[0].id
#_logger.info(global_coll_id)
    except Exception:
global_coll_id = None
#_logger.debug('Unable to create collection')
return global_coll_id
def verify_datafed_connection():
"""
Verify Datafed installation and connection
"""
try:
from datafed.CommandLib import API
except ImportError:
# _logger.info('datafed not found. Installing from pip.')
subprocess.call([sys.executable, "-m", "pip", "install", 'datafed'])
from datafed.CommandLib import API
df_api = API()
#print('Success! You have DataFed: ' + df_ver)
# Verify user authentication
if not df_api.getAuthUser():
print('You have not authenticated into DataFed Client')
# Check default Globus endpoint
if not df_api.endpointDefaultGet():
endpoint = 'cades#CADES-OR'
df_api.endpointDefaultSet(endpoint)
#print('Your default Globus Endpoint in DataFed is:\n' + df_api.endpointDefaultGet())
# Test the endpoint
path = str((Path.home()) / '.nufeb_tools' / 'datafed')
cp_dir = (Path.home()) / '.nufeb_tools' / 'datafed'
cp_dir.mkdir(exist_ok=True)
dget_resp = df_api.dataGet('d/35437908',
path,
wait=True)
# _logger.debug(dget_resp)
if dget_resp[0].task[0].status == 3:
file = (Path.home()) / '.nufeb_tools' / 'datafed' /'35437908.md5sum'
file.unlink()
else:
if dget_resp[0].task[0].msg == "globus connect offline":
print('You need to activate your Globus Endpoint and/or ensure Globus Connect Personal is running.\n'
'Please visit https://globus.org to activate your Endpoint')
sys.exit(1)
elif dget_resp[0].task[0].msg == "permission denied":
            print('Globus does not have write access to this directory.\n'
                  'If you are using Globus Connect Personal, ensure that this notebook runs within '
                  'one of the directories where Globus has write access. You may consider moving this '
                  'notebook to a valid directory or add this directory to the Globus Connect Personal settings')
sys.exit(1)
else:
            raise NotImplementedError('Get in touch with us or consider looking online to find a solution to this problem:\n' + dget_resp[0].task[0].msg)
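# --- Added usage sketch (not part of the original module) ---
# Typical interactive use, based on the public surface defined above:
#
#     x = get_data(test=True)     # fetches the bundled example run on first use
#     x.get_mothers()             # assign colony ancestry to every cell
#     print(x.biomass.head())     # biomass in femtograms, indexed in hours
#
# get_fitness() evaluates a standard Monod rate, i.e. the growth rate
# multiplied by S / (K_s + S) for each limiting nutrient S, minus the
# maintenance and decay terms for the E. coli type.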
| 40.642857
| 162
| 0.582172
|
089e3fa219424451b09f96298c4668bf9cb6732f
| 1,379
|
py
|
Python
|
tsdb_app.py
|
loblab/resouce-simulator
|
a3d62f32ec1f377548519e7aa4eaef10d5bdd0c2
|
[
"Apache-2.0"
] | null | null | null |
tsdb_app.py
|
loblab/resouce-simulator
|
a3d62f32ec1f377548519e7aa4eaef10d5bdd0c2
|
[
"Apache-2.0"
] | null | null | null |
tsdb_app.py
|
loblab/resouce-simulator
|
a3d62f32ec1f377548519e7aa4eaef10d5bdd0c2
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python3
from influxdb import InfluxDBClient
from base_app import *
class TsdbApp(BaseApp):
DEFAULT_HOST = "influxdb"
DEFAULT_PORT = 8086
DEFAULT_DATA = "resim"
DEFAULT_USER = ""
DEFAULT_PSWD = ""
def base_init(self):
self.argps.add_argument('-s', '--host', dest='host', type=str, default=self.DEFAULT_HOST,
help="InfluxDB server address. default '%s'" % self.DEFAULT_HOST)
self.argps.add_argument('-p', '--port', dest='port', type=int, default=self.DEFAULT_PORT,
help="InfluxDB server port. default %d" % self.DEFAULT_PORT)
self.argps.add_argument('-d', '--data', dest='data', type=str, default=self.DEFAULT_DATA,
help="InfluxDB server database. default '%s'" % self.DEFAULT_DATA)
self.argps.add_argument('-u', '--user', dest='user', type=str, default=self.DEFAULT_USER,
help="InfluxDB server user name. default '%s'" % self.DEFAULT_USER)
self.argps.add_argument('-w', '--pswd', dest='pswd', type=str, default=self.DEFAULT_PSWD,
help="InfluxDB server password. default '%s'" % self.DEFAULT_PSWD)
def startup(self):
self.log.info("Connect to InfluxDB (%s:%d)...", self.args.host, self.args.port)
self.tsdb = InfluxDBClient(self.args.host, self.args.port,
self.args.user, self.args.pswd, self.args.data)
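# Added usage sketch (not part of the original script), based on the
# arguments registered in base_init():
#
#     python3 tsdb_app.py -s localhost -p 8086 -d resim -u admin -w secret
#
# startup() then opens an InfluxDBClient against that server and database.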
| 44.483871
| 97
| 0.645395
|
3a3da41d2a442329ea985c0c42655c39d3b1a911
| 1,805
|
py
|
Python
|
fork/protocols/farmer_protocol.py
|
Fork-Network/fork-blockchain
|
4e7c55b5787376dabacc8049eac49c0bb0bfd855
|
[
"Apache-2.0"
] | 7
|
2021-07-23T22:06:56.000Z
|
2022-02-09T04:30:23.000Z
|
fork/protocols/farmer_protocol.py
|
Fork-Network/fork-blockchain
|
4e7c55b5787376dabacc8049eac49c0bb0bfd855
|
[
"Apache-2.0"
] | null | null | null |
fork/protocols/farmer_protocol.py
|
Fork-Network/fork-blockchain
|
4e7c55b5787376dabacc8049eac49c0bb0bfd855
|
[
"Apache-2.0"
] | 2
|
2021-07-29T10:11:56.000Z
|
2021-08-01T19:37:18.000Z
|
from dataclasses import dataclass
from typing import Optional
from blspy import G2Element
from fork.types.blockchain_format.pool_target import PoolTarget
from fork.types.blockchain_format.proof_of_space import ProofOfSpace
from fork.types.blockchain_format.sized_bytes import bytes32
from fork.util.ints import uint8, uint32, uint64
from fork.util.streamable import Streamable, streamable
"""
Protocol between farmer and full node.
Note: When changing this file, also change protocol_message_types.py, and the protocol version in shared_protocol.py
"""
@dataclass(frozen=True)
@streamable
class NewSignagePoint(Streamable):
challenge_hash: bytes32
challenge_chain_sp: bytes32
reward_chain_sp: bytes32
difficulty: uint64
sub_slot_iters: uint64
signage_point_index: uint8
@dataclass(frozen=True)
@streamable
class DeclareProofOfSpace(Streamable):
challenge_hash: bytes32
challenge_chain_sp: bytes32
signage_point_index: uint8
reward_chain_sp: bytes32
proof_of_space: ProofOfSpace
challenge_chain_sp_signature: G2Element
reward_chain_sp_signature: G2Element
farmer_puzzle_hash: bytes32
pool_target: Optional[PoolTarget]
pool_signature: Optional[G2Element]
@dataclass(frozen=True)
@streamable
class RequestSignedValues(Streamable):
quality_string: bytes32
foliage_block_data_hash: bytes32
foliage_transaction_block_hash: bytes32
@dataclass(frozen=True)
@streamable
class FarmingInfo(Streamable):
challenge_hash: bytes32
sp_hash: bytes32
timestamp: uint64
passed: uint32
proofs: uint32
total_plots: uint32
@dataclass(frozen=True)
@streamable
class SignedValues(Streamable):
quality_string: bytes32
foliage_block_data_signature: G2Element
foliage_transaction_block_signature: G2Element
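# Added usage sketch (not part of the original file). Assuming bytes32
# accepts 32 raw bytes (as elsewhere in this codebase), a NewSignagePoint
# message is built like any frozen dataclass:
#
#     sp = NewSignagePoint(
#         challenge_hash=bytes32(b"\x00" * 32),
#         challenge_chain_sp=bytes32(b"\x00" * 32),
#         reward_chain_sp=bytes32(b"\x00" * 32),
#         difficulty=uint64(1),
#         sub_slot_iters=uint64(512),
#         signage_point_index=uint8(0),
#     )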
| 26.15942
| 116
| 0.801662
|
32b54743e28f398ebfec83cb5825e025ed9f7943
| 474
|
py
|
Python
|
geom2d/point.py
|
VLovets/python_01
|
6521fcac0dfedc456e8a9263e7705616eb642078
|
[
"Apache-2.0"
] | null | null | null |
geom2d/point.py
|
VLovets/python_01
|
6521fcac0dfedc456e8a9263e7705616eb642078
|
[
"Apache-2.0"
] | null | null | null |
geom2d/point.py
|
VLovets/python_01
|
6521fcac0dfedc456e8a9263e7705616eb642078
|
[
"Apache-2.0"
] | null | null | null |
__author__ = 'vlovets'
from math import sqrt
class Point:
def __init__(self, _x, _y):
self.x = _x
self.y = _y
def distance(self, p2):
dx = p2.x - self.x
dy = p2.y - self.y
return sqrt(dx*dx + dy*dy)
def __eq__(self, other):
return self.x == other.x and self.y == other.y
# def __lt__(self, other):
# return self.y < other.y
def __repr__(self):
return 'Point(%s, %s)' % (self.x, self.y)
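if __name__ == '__main__':
    # Added usage check (not part of the original module): the distance
    # between (0, 0) and (3, 4) is the 3-4-5 right triangle, i.e. 5.0.
    p1 = Point(0, 0)
    p2 = Point(3, 4)
    print(p1.distance(p2))      # 5.0
    print(p1 == Point(0, 0))    # True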
| 20.608696
| 54
| 0.537975
|
5a54f4209e4550ab78aec71752465e18f5c80f81
| 4,512
|
py
|
Python
|
src/data/musdb_amss/audio_deeffect.py
|
ws-choi/AMSS-Net
|
9e906f64365526dbb184340ac9565ac8bc32f830
|
[
"MIT"
] | 4
|
2021-07-16T04:11:50.000Z
|
2022-02-09T13:51:26.000Z
|
src/data/musdb_amss/audio_deeffect.py
|
ws-choi/AMSS-Net
|
9e906f64365526dbb184340ac9565ac8bc32f830
|
[
"MIT"
] | null | null | null |
src/data/musdb_amss/audio_deeffect.py
|
ws-choi/AMSS-Net
|
9e906f64365526dbb184340ac9565ac8bc32f830
|
[
"MIT"
] | 1
|
2022-01-11T07:20:30.000Z
|
2022-01-11T07:20:30.000Z
|
import random
from itertools import permutations, combinations
import numpy as np
from src.amss.amss_desc.abstract import Selective_Audio_Editing
from src.amss.amss_desc.sound_effects import SndFx
from src.data.musdb_amss.audio_effect import effects_dict_ijcai
from src.utils.functions import normalize
targets = ['vocals', 'drums', 'bass']
def get_target_index(target):
return targets.index(target)
class Musdb_DeEffect(Selective_Audio_Editing):
def __init__(self, snd_fx: SndFx, target_names):
self.targets = target_names
self.targets_index = [get_target_index(target) for target in self.targets]
self.snd_fx = snd_fx
target_permute = list(permutations(self.targets))
self.descriptions = ['remove {} from {}'.format(snd_fx.name, ', '.join(d))
for d
in target_permute]
self.n_descriptions = len(self.descriptions)
self.__paired_separate__ = 'separate {}'.format(', '.join(target_permute[0]))
self.__paired_mute__ = 'mute {}'.format(', '.join(target_permute[0]))
def edit(self, unmixed_track: np.ndarray):
manipulated_track = np.copy(unmixed_track)
for idx in range(3):
if idx in self.targets_index:
# unmixed_track[idx] = self.snd_fx(unmixed_track[idx])
# else:
manipulated_track[idx] = self.snd_fx(manipulated_track[idx])
# unmixed_track[idx] = self.snd_fx(unmixed_track[idx])
linear_sum = np.sum(unmixed_track, axis=0)
manipulated_linear_sum = np.sum(manipulated_track, axis=0)
linear_sum, manipulated_linear_sum = normalize(linear_sum, manipulated_linear_sum)
return self.gen_desc(), manipulated_linear_sum, linear_sum
def edit_with_default_desc(self, unmixed_track):
manipulated_track = np.copy(unmixed_track)
for idx in range(3):
if idx in self.targets_index:
# unmixed_track[idx] = self.snd_fx(unmixed_track[idx])
# else:
manipulated_track[idx] = self.snd_fx(manipulated_track[idx])
# unmixed_track[idx] = self.snd_fx(unmixed_track[idx])
linear_sum = np.sum(unmixed_track, axis=0)
manipulated_linear_sum = np.sum(manipulated_track, axis=0)
linear_sum, manipulated_linear_sum = normalize(linear_sum, manipulated_linear_sum)
return self.gen_desc_default(), manipulated_linear_sum, linear_sum
def edit_for_test(self, unmixed_track):
manipulated_linear_sum = np.copy(unmixed_track)
tar_before = np.zeros_like(manipulated_linear_sum[0])
tar_after = np.zeros_like(manipulated_linear_sum[0])
acc = np.zeros_like(manipulated_linear_sum[0])
for idx in range(4):
if idx in self.targets_index:
tar_after = tar_after + manipulated_linear_sum[idx]
manipulated_linear_sum[idx] = self.snd_fx(manipulated_linear_sum[idx])
tar_before = tar_before + manipulated_linear_sum[idx]
else:
acc = acc + manipulated_linear_sum[idx]
linear_sum = np.sum(unmixed_track, axis=0)
manipulated_linear_sum = np.sum(manipulated_linear_sum, axis=0)
max_scale = max(linear_sum.max(), manipulated_linear_sum.max())
max_scale = 1 if max_scale < 1 else max_scale
linear_sum, manipulated_linear_sum = linear_sum / max_scale, manipulated_linear_sum / max_scale
tar_before, tar_after, acc = tar_before / max_scale, tar_after / max_scale, acc / max_scale
after, before = linear_sum, manipulated_linear_sum
return before, after, tar_before, tar_after, acc
def gen_desc(self):
idx = random.randint(0, self.n_descriptions - 1)
return self.descriptions[idx]
def gen_desc_default(self):
return self.descriptions[0]
def gen_paired_mute(self):
return self.__paired_mute__
def gen_paired_separate(self):
return self.__paired_separate__
def __str__(self):
return 'AMSS: ' + self.gen_desc()
def amss_deeffect_generator(effect_names=effects_dict_ijcai.keys()):
amss_deeffect_set = []
for i in [1, 2, 3]:
for comb in combinations(targets, i):
for effect_name in effect_names:
if 'reverb' in effect_name:
amss_deeffect_set.append(Musdb_DeEffect(effects_dict_ijcai[effect_name], comb))
return amss_deeffect_set
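# Added usage sketch (not part of the original module):
#
#     amss_set = amss_deeffect_generator()   # one task per reverb effect x target subset
#     print(amss_set[0].gen_desc())          # e.g. "remove <reverb effect> from vocals"
#     desc, manipulated, mixture = amss_set[0].edit(unmixed_track)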
| 37.6
| 103
| 0.670878
|
4b03e92b48e0377cfb262d8bf9668351410096f2
| 2,974
|
py
|
Python
|
tests/granular/test_sentences.py
|
EpcLoler/nlp_profiler
|
bf609457866faf4c4f8e9b57ef2b2e07227cf8e9
|
[
"Apache-2.0"
] | 221
|
2020-09-06T14:07:37.000Z
|
2022-03-26T08:18:24.000Z
|
tests/granular/test_sentences.py
|
EpcLoler/nlp_profiler
|
bf609457866faf4c4f8e9b57ef2b2e07227cf8e9
|
[
"Apache-2.0"
] | 57
|
2020-09-12T13:55:36.000Z
|
2022-01-21T01:14:37.000Z
|
tests/granular/test_sentences.py
|
EpcLoler/nlp_profiler
|
bf609457866faf4c4f8e9b57ef2b2e07227cf8e9
|
[
"Apache-2.0"
] | 37
|
2020-09-06T17:05:32.000Z
|
2022-02-22T10:04:43.000Z
|
import numpy as np
import pytest
from nlp_profiler.constants import NaN
from nlp_profiler.granular_features.sentences import gather_sentences, count_sentences # noqa
text_with_emojis = "I love ⚽ very much 😁"
text_with_emojis_ends_with_period = "I love ⚽ very much 😁."
text_with_a_number = '2833047 people live in this area.'
text_with_two_sentences = text_with_a_number + " " + text_with_emojis
text_to_return_value_mapping = [
(np.nan, []),
(float('nan'), []),
(None, []),
]
@pytest.mark.parametrize("text,expected_result",
text_to_return_value_mapping)
def test_given_invalid_text_when_parsed_then_return_empty_list(
text: str, expected_result: str
):
# given, when
actual_result = gather_sentences(text)
# then
assert expected_result == actual_result, \
f"Expected: {expected_result}, Actual: {actual_result}"
text_to_return_count_mapping = [
(np.nan, NaN),
(float('nan'), NaN),
(None, NaN),
]
@pytest.mark.parametrize("text,expected_result",
text_to_return_count_mapping)
def test_given_invalid_text_when_counted_then_return_NaN(
text: str, expected_result: float
):
# given, when
actual_result = count_sentences(text)
# then
assert expected_result is actual_result, \
f"Expected: {expected_result}, Actual: {actual_result}"
@pytest.mark.parametrize("text,expected_result",
[
(text_with_emojis, 1),
(text_with_emojis_ends_with_period, 1),
(text_with_a_number, 1),
(text_with_two_sentences, 2),
('....', 1),
(';;;;;;', 1),
('', 0),
(' ', 0),
('a', 1),
('⚽😁', 1),
])
def test_given_a_text_with_sentences_when_counted_then_return_the_count_of_sentences(
text, expected_result
):
# given, when
actual_result = count_sentences(text)
# then
assert actual_result == expected_result, \
"Didn't find the expected number of sentence in the text. " \
f"Expected: {expected_result}, Actual: {actual_result}"
@pytest.mark.parametrize("text,expected_result",
[
(text_with_a_number, [text_with_a_number]),
(text_with_two_sentences, [text_with_a_number, text_with_emojis]),
])
def test_given_a_text_with_sentences_when_parsed_then_return_the_sentences(
text: str, expected_result: list
):
# given, when
actual_result = gather_sentences(text)
# then
assert expected_result == actual_result, \
"Didn't find the expected sentence(s) in the text." \
f"Expected: {expected_result}, Actual: {actual_result}"
| 32.326087
| 95
| 0.596503
|
9f96edd90b779a5cba20a6f39e5275774325a1aa
| 5,084
|
py
|
Python
|
magenta/models/performance_rnn/performance_rnn_train.py
|
brycecr/magenta
|
24c14ca346506f31d789db35bb1b0173b7d97ce6
|
[
"Apache-2.0"
] | null | null | null |
magenta/models/performance_rnn/performance_rnn_train.py
|
brycecr/magenta
|
24c14ca346506f31d789db35bb1b0173b7d97ce6
|
[
"Apache-2.0"
] | null | null | null |
magenta/models/performance_rnn/performance_rnn_train.py
|
brycecr/magenta
|
24c14ca346506f31d789db35bb1b0173b7d97ce6
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 The Magenta Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Train and evaluate a performance RNN model."""
import os
import magenta
from magenta.models.performance_rnn import performance_model
from magenta.models.shared import events_rnn_graph
from magenta.models.shared import events_rnn_train
import tensorflow.compat.v1 as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string('run_dir', '/tmp/performance_rnn/logdir/run1',
'Path to the directory where checkpoints and '
'summary events will be saved during training and '
'evaluation. Separate subdirectories for training '
'events and eval events will be created within '
'`run_dir`. Multiple runs can be stored within the '
'parent directory of `run_dir`. Point TensorBoard '
'to the parent directory of `run_dir` to see all '
'your runs.')
tf.app.flags.DEFINE_string('config', 'performance', 'The config to use')
tf.app.flags.DEFINE_string('sequence_example_file', '',
'Path to TFRecord file containing '
'tf.SequenceExample records for training or '
'evaluation.')
tf.app.flags.DEFINE_integer('num_training_steps', 0,
                            'The number of global training steps your '
'model should take before exiting training. '
'Leave as 0 to run until terminated manually.')
tf.app.flags.DEFINE_integer('num_eval_examples', 0,
'The number of evaluation examples your model '
                            'should process for each evaluation step. '
'Leave as 0 to use the entire evaluation set.')
tf.app.flags.DEFINE_integer('summary_frequency', 10,
'A summary statement will be logged every '
'`summary_frequency` steps during training or '
'every `summary_frequency` seconds during '
'evaluation.')
tf.app.flags.DEFINE_integer('num_checkpoints', 10,
'The number of most recent checkpoints to keep in '
'the training directory. Keeps all if 0.')
tf.app.flags.DEFINE_boolean('eval', False,
'If True, this process only evaluates the model '
'and does not update weights.')
tf.app.flags.DEFINE_string('log', 'INFO',
'The threshold for what messages will be logged '
'DEBUG, INFO, WARN, ERROR, or FATAL.')
tf.app.flags.DEFINE_string(
'hparams', '',
'Comma-separated list of `name=value` pairs. For each pair, the value of '
'the hyperparameter named `name` is set to `value`. This mapping is merged '
'with the default hyperparameters.')
def main(unused_argv):
tf.logging.set_verbosity(FLAGS.log)
if not FLAGS.run_dir:
tf.logging.fatal('--run_dir required')
return
if not FLAGS.sequence_example_file:
tf.logging.fatal('--sequence_example_file required')
return
sequence_example_file_paths = tf.gfile.Glob(
os.path.expanduser(FLAGS.sequence_example_file))
run_dir = os.path.expanduser(FLAGS.run_dir)
config = performance_model.default_configs[FLAGS.config]
config.hparams.parse(FLAGS.hparams)
mode = 'eval' if FLAGS.eval else 'train'
build_graph_fn = events_rnn_graph.get_build_graph_fn(
mode, config, sequence_example_file_paths)
train_dir = os.path.join(run_dir, 'train')
tf.gfile.MakeDirs(train_dir)
tf.logging.info('Train dir: %s', train_dir)
if FLAGS.eval:
eval_dir = os.path.join(run_dir, 'eval')
tf.gfile.MakeDirs(eval_dir)
tf.logging.info('Eval dir: %s', eval_dir)
num_batches = (
(FLAGS.num_eval_examples or
magenta.common.count_records(sequence_example_file_paths)) //
config.hparams.batch_size)
events_rnn_train.run_eval(build_graph_fn, train_dir, eval_dir, num_batches)
else:
events_rnn_train.run_training(build_graph_fn, train_dir,
FLAGS.num_training_steps,
FLAGS.summary_frequency,
checkpoints_to_keep=FLAGS.num_checkpoints)
def console_entry_point():
tf.disable_v2_behavior()
tf.app.run(main)
if __name__ == '__main__':
console_entry_point()
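# Added usage sketch (not part of the original script), using the flags
# defined above:
#
#     python performance_rnn_train.py \
#         --run_dir=/tmp/performance_rnn/logdir/run1 \
#         --sequence_example_file=/tmp/training_performances.tfrecord \
#         --config=performance --num_training_steps=20000
#
# Pass --eval to run the evaluation loop against the same run_dir instead
# of updating weights.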
| 43.084746
| 80
| 0.632376
|
aabc56777febd4479fcd752e2986b0ee322904eb
| 145,934
|
py
|
Python
|
glance_docker/glance/tests/functional/v2/test_images.py
|
tobegit3hub/dockerized-software
|
3781bc1145b6fbb8d5fa2e2eaeaa3aa138a69632
|
[
"Apache-2.0"
] | null | null | null |
glance_docker/glance/tests/functional/v2/test_images.py
|
tobegit3hub/dockerized-software
|
3781bc1145b6fbb8d5fa2e2eaeaa3aa138a69632
|
[
"Apache-2.0"
] | null | null | null |
glance_docker/glance/tests/functional/v2/test_images.py
|
tobegit3hub/dockerized-software
|
3781bc1145b6fbb8d5fa2e2eaeaa3aa138a69632
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import signal
import uuid
from oslo_serialization import jsonutils
import requests
import six
# NOTE(jokke): simplified transition to py3, behaves like py2 xrange
from six.moves import range
from glance.tests import functional
from glance.tests import utils as test_utils
TENANT1 = str(uuid.uuid4())
TENANT2 = str(uuid.uuid4())
TENANT3 = str(uuid.uuid4())
TENANT4 = str(uuid.uuid4())
class TestImages(functional.FunctionalTest):
def setUp(self):
super(TestImages, self).setUp()
self.cleanup()
self.api_server.deployment_flavor = 'noauth'
self.api_server.data_api = 'glance.db.sqlalchemy.api'
for i in range(3):
ret = test_utils.start_http_server("foo_image_id%d" % i,
"foo_image%d" % i)
setattr(self, 'http_server%d_pid' % i, ret[0])
setattr(self, 'http_port%d' % i, ret[1])
def tearDown(self):
for i in range(3):
pid = getattr(self, 'http_server%d_pid' % i, None)
if pid:
os.kill(pid, signal.SIGKILL)
super(TestImages, self).tearDown()
def _url(self, path):
return 'http://127.0.0.1:%d%s' % (self.api_port, path)
def _headers(self, custom_headers=None):
base_headers = {
'X-Identity-Status': 'Confirmed',
'X-Auth-Token': '932c5c84-02ac-4fe5-a9ba-620af0e2bb96',
'X-User-Id': 'f9a41d13-0c13-47e9-bee2-ce4e8bfe958e',
'X-Tenant-Id': TENANT1,
'X-Roles': 'member',
}
base_headers.update(custom_headers or {})
return base_headers
def test_v1_none_properties_v2(self):
self.api_server.deployment_flavor = 'noauth'
self.api_server.use_user_token = True
self.api_server.send_identity_credentials = True
self.registry_server.deployment_flavor = ''
# Image list should be empty
self.start_servers(**self.__dict__.copy())
# Create an image (with two deployer-defined properties)
path = self._url('/v1/images')
headers = self._headers({'content-type': 'application/octet-stream'})
headers.update(test_utils.minimal_headers('image-1'))
# NOTE(flaper87): Sending empty string, the server will use None
headers['x-image-meta-property-my_empty_prop'] = ''
response = requests.post(path, headers=headers)
self.assertEqual(201, response.status_code)
data = jsonutils.loads(response.text)
image_id = data['image']['id']
# NOTE(flaper87): Get the image using V2 and verify
# the returned value for `my_empty_prop` is an empty
# string.
path = self._url('/v2/images/%s' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertEqual('', image['my_empty_prop'])
self.stop_servers()
def test_not_authenticated_in_registry_on_ops(self):
# https://bugs.launchpad.net/glance/+bug/1451850
# this configuration guarantees that authentication succeeds in
# glance-api and fails in glance-registry if no token is passed
self.api_server.deployment_flavor = ''
# make sure that request will reach registry
self.api_server.data_api = 'glance.db.registry.api'
self.registry_server.deployment_flavor = 'fakeauth'
self.start_servers(**self.__dict__.copy())
headers = {'content-type': 'application/json'}
image = {'name': 'image', 'type': 'kernel', 'disk_format': 'qcow2',
'container_format': 'bare'}
# image create should return 401
response = requests.post(self._url('/v2/images'), headers=headers,
data=jsonutils.dumps(image))
self.assertEqual(401, response.status_code)
# image list should return 401
response = requests.get(self._url('/v2/images'))
self.assertEqual(401, response.status_code)
# image show should return 401
response = requests.get(self._url('/v2/images/someimageid'))
self.assertEqual(401, response.status_code)
# image update should return 401
ops = [{'op': 'replace', 'path': '/protected', 'value': False}]
media_type = 'application/openstack-images-v2.1-json-patch'
response = requests.patch(self._url('/v2/images/someimageid'),
headers={'content-type': media_type},
data=jsonutils.dumps(ops))
self.assertEqual(401, response.status_code)
# image delete should return 401
response = requests.delete(self._url('/v2/images/someimageid'))
self.assertEqual(401, response.status_code)
self.stop_servers()
def test_image_lifecycle(self):
# Image list should be empty
self.start_servers(**self.__dict__.copy())
path = self._url('/v2/images')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(0, len(images))
# Create an image (with two deployer-defined properties)
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json'})
data = jsonutils.dumps({'name': 'image-1', 'type': 'kernel',
'foo': 'bar', 'disk_format': 'aki',
'container_format': 'aki', 'abc': 'xyz'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image_location_header = response.headers['Location']
# Returned image entity should have a generated id and status
image = jsonutils.loads(response.text)
image_id = image['id']
checked_keys = set([
u'status',
u'name',
u'tags',
u'created_at',
u'updated_at',
u'visibility',
u'self',
u'protected',
u'id',
u'file',
u'min_disk',
u'foo',
u'abc',
u'type',
u'min_ram',
u'schema',
u'disk_format',
u'container_format',
u'owner',
u'checksum',
u'size',
u'virtual_size',
])
self.assertEqual(checked_keys, set(image.keys()))
expected_image = {
'status': 'queued',
'name': 'image-1',
'tags': [],
'visibility': 'private',
'self': '/v2/images/%s' % image_id,
'protected': False,
'file': '/v2/images/%s/file' % image_id,
'min_disk': 0,
'foo': 'bar',
'abc': 'xyz',
'type': 'kernel',
'min_ram': 0,
'schema': '/v2/schemas/image',
}
for key, value in expected_image.items():
self.assertEqual(value, image[key], key)
# Image list should now have one entry
path = self._url('/v2/images')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(1, len(images))
self.assertEqual(image_id, images[0]['id'])
# Create another image (with two deployer-defined properties)
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json'})
data = jsonutils.dumps({'name': 'image-2', 'type': 'kernel',
'bar': 'foo', 'disk_format': 'aki',
'container_format': 'aki', 'xyz': 'abc'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Returned image entity should have a generated id and status
image = jsonutils.loads(response.text)
image2_id = image['id']
checked_keys = set([
u'status',
u'name',
u'tags',
u'created_at',
u'updated_at',
u'visibility',
u'self',
u'protected',
u'id',
u'file',
u'min_disk',
u'bar',
u'xyz',
u'type',
u'min_ram',
u'schema',
u'disk_format',
u'container_format',
u'owner',
u'checksum',
u'size',
u'virtual_size',
])
self.assertEqual(checked_keys, set(image.keys()))
expected_image = {
'status': 'queued',
'name': 'image-2',
'tags': [],
'visibility': 'private',
'self': '/v2/images/%s' % image2_id,
'protected': False,
'file': '/v2/images/%s/file' % image2_id,
'min_disk': 0,
'bar': 'foo',
'xyz': 'abc',
'type': 'kernel',
'min_ram': 0,
'schema': '/v2/schemas/image',
}
for key, value in expected_image.items():
self.assertEqual(value, image[key], key)
# Image list should now have two entries
path = self._url('/v2/images')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(2, len(images))
self.assertEqual(image2_id, images[0]['id'])
self.assertEqual(image_id, images[1]['id'])
# Image list should list only image-2 as image-1 doesn't contain the
# property 'bar'
path = self._url('/v2/images?bar=foo')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(1, len(images))
self.assertEqual(image2_id, images[0]['id'])
# Image list should list only image-1 as image-2 doesn't contain the
# property 'foo'
path = self._url('/v2/images?foo=bar')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(1, len(images))
self.assertEqual(image_id, images[0]['id'])
# The "changes-since" filter shouldn't work on glance v2
path = self._url('/v2/images?changes-since=20001007T10:10:10')
response = requests.get(path, headers=self._headers())
self.assertEqual(400, response.status_code)
path = self._url('/v2/images?changes-since=aaa')
response = requests.get(path, headers=self._headers())
self.assertEqual(400, response.status_code)
# Image list should list only image-1 based on the filter
# 'foo=bar&abc=xyz'
path = self._url('/v2/images?foo=bar&abc=xyz')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(1, len(images))
self.assertEqual(image_id, images[0]['id'])
# Image list should list only image-2 based on the filter
# 'bar=foo&xyz=abc'
path = self._url('/v2/images?bar=foo&xyz=abc')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(1, len(images))
self.assertEqual(image2_id, images[0]['id'])
# Image list should not list anything as the filter 'foo=baz&abc=xyz'
        # is not satisfied by either image
path = self._url('/v2/images?foo=baz&abc=xyz')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(0, len(images))
# Get the image using the returned Location header
response = requests.get(image_location_header, headers=self._headers())
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertEqual(image_id, image['id'])
self.assertIsNone(image['checksum'])
self.assertIsNone(image['size'])
self.assertIsNone(image['virtual_size'])
self.assertEqual('bar', image['foo'])
self.assertFalse(image['protected'])
self.assertEqual('kernel', image['type'])
self.assertTrue(image['created_at'])
self.assertTrue(image['updated_at'])
self.assertEqual(image['updated_at'], image['created_at'])
# The image should be mutable, including adding and removing properties
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type})
data = jsonutils.dumps([
{'op': 'replace', 'path': '/name', 'value': 'image-2'},
{'op': 'replace', 'path': '/disk_format', 'value': 'vhd'},
{'op': 'replace', 'path': '/container_format', 'value': 'ami'},
{'op': 'replace', 'path': '/foo', 'value': 'baz'},
{'op': 'add', 'path': '/ping', 'value': 'pong'},
{'op': 'replace', 'path': '/protected', 'value': True},
{'op': 'remove', 'path': '/type'},
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code, response.text)
# Returned image entity should reflect the changes
image = jsonutils.loads(response.text)
self.assertEqual('image-2', image['name'])
self.assertEqual('vhd', image['disk_format'])
self.assertEqual('baz', image['foo'])
self.assertEqual('pong', image['ping'])
self.assertTrue(image['protected'])
self.assertNotIn('type', image, response.text)
# Adding 11 image properties should fail since configured limit is 10
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type})
changes = []
for i in range(11):
changes.append({'op': 'add',
'path': '/ping%i' % i,
'value': 'pong'})
data = jsonutils.dumps(changes)
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(413, response.status_code, response.text)
# Adding 3 image locations should fail since configured limit is 2
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type})
changes = []
for i in range(3):
url = ('http://127.0.0.1:%s/foo_image' %
getattr(self, 'http_port%d' % i))
changes.append({'op': 'add', 'path': '/locations/-',
'value': {'url': url, 'metadata': {}},
})
data = jsonutils.dumps(changes)
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(413, response.status_code, response.text)
# Ensure the v2.0 json-patch content type is accepted
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.0-json-patch'
headers = self._headers({'content-type': media_type})
data = jsonutils.dumps([{'add': '/ding', 'value': 'dong'}])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code, response.text)
# Returned image entity should reflect the changes
image = jsonutils.loads(response.text)
self.assertEqual('dong', image['ding'])
# Updates should persist across requests
path = self._url('/v2/images/%s' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertEqual(image_id, image['id'])
self.assertEqual('image-2', image['name'])
self.assertEqual('baz', image['foo'])
self.assertEqual('pong', image['ping'])
self.assertTrue(image['protected'])
self.assertNotIn('type', image, response.text)
        # Try to download data before it's uploaded
path = self._url('/v2/images/%s/file' % image_id)
headers = self._headers()
response = requests.get(path, headers=headers)
self.assertEqual(204, response.status_code)
def _verify_image_checksum_and_status(checksum, status):
# Checksum should be populated and status should be active
path = self._url('/v2/images/%s' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertEqual(checksum, image['checksum'])
self.assertEqual(status, image['status'])
# Upload some image data
path = self._url('/v2/images/%s/file' % image_id)
headers = self._headers({'Content-Type': 'application/octet-stream'})
response = requests.put(path, headers=headers, data='ZZZZZ')
self.assertEqual(204, response.status_code)
expected_checksum = '8f113e38d28a79a5a451b16048cc2b72'
_verify_image_checksum_and_status(expected_checksum, 'active')
# `disk_format` and `container_format` cannot
# be replaced when the image is active.
immutable_paths = ['/disk_format', '/container_format']
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type})
path = self._url('/v2/images/%s' % image_id)
for immutable_path in immutable_paths:
data = jsonutils.dumps([
{'op': 'replace', 'path': immutable_path, 'value': 'ari'},
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(403, response.status_code)
# Try to download the data that was just uploaded
path = self._url('/v2/images/%s/file' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
self.assertEqual(expected_checksum, response.headers['Content-MD5'])
self.assertEqual('ZZZZZ', response.text)
# Uploading duplicate data should be rejected with a 409. The
# original data should remain untouched.
path = self._url('/v2/images/%s/file' % image_id)
headers = self._headers({'Content-Type': 'application/octet-stream'})
response = requests.put(path, headers=headers, data='XXX')
self.assertEqual(409, response.status_code)
_verify_image_checksum_and_status(expected_checksum, 'active')
# Ensure the size is updated to reflect the data uploaded
path = self._url('/v2/images/%s' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
self.assertEqual(5, jsonutils.loads(response.text)['size'])
# Should be able to deactivate image
path = self._url('/v2/images/%s/actions/deactivate' % image_id)
response = requests.post(path, data={}, headers=self._headers())
self.assertEqual(204, response.status_code)
# Deactivating a deactivated image succeeds (no-op)
path = self._url('/v2/images/%s/actions/deactivate' % image_id)
response = requests.post(path, data={}, headers=self._headers())
self.assertEqual(204, response.status_code)
# Can't download a deactivated image
path = self._url('/v2/images/%s/file' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(403, response.status_code)
# Deactivated image should still be in a listing
path = self._url('/v2/images')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(2, len(images))
self.assertEqual(image2_id, images[0]['id'])
self.assertEqual(image_id, images[1]['id'])
# Should be able to reactivate a deactivated image
path = self._url('/v2/images/%s/actions/reactivate' % image_id)
response = requests.post(path, data={}, headers=self._headers())
self.assertEqual(204, response.status_code)
# Reactivating an active image succeeds (no-op)
path = self._url('/v2/images/%s/actions/reactivate' % image_id)
response = requests.post(path, data={}, headers=self._headers())
self.assertEqual(204, response.status_code)
# Deletion should not work on protected images
path = self._url('/v2/images/%s' % image_id)
response = requests.delete(path, headers=self._headers())
self.assertEqual(403, response.status_code)
# Unprotect image for deletion
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type})
doc = [{'op': 'replace', 'path': '/protected', 'value': False}]
data = jsonutils.dumps(doc)
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code, response.text)
        # After removing all of the image's locations, its size
        # should no longer be accessible
path = self._url('/v2/images/%s' % image2_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type})
doc = [{'op': 'replace', 'path': '/locations', 'value': []}]
data = jsonutils.dumps(doc)
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code, response.text)
image = jsonutils.loads(response.text)
self.assertIsNone(image['size'])
self.assertIsNone(image['virtual_size'])
self.assertEqual('queued', image['status'])
        # Deletion of image-1 should now work (it is no longer protected)
path = self._url('/v2/images/%s' % image_id)
response = requests.delete(path, headers=self._headers())
self.assertEqual(204, response.status_code)
        # This image should no longer be directly accessible
path = self._url('/v2/images/%s' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(404, response.status_code)
# And neither should its data
path = self._url('/v2/images/%s/file' % image_id)
headers = self._headers()
response = requests.get(path, headers=headers)
self.assertEqual(404, response.status_code)
# Image list should now contain just image-2
path = self._url('/v2/images')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(1, len(images))
self.assertEqual(image2_id, images[0]['id'])
# Deleting image-2 should work
path = self._url('/v2/images/%s' % image2_id)
response = requests.delete(path, headers=self._headers())
self.assertEqual(204, response.status_code)
# Image list should now be empty
path = self._url('/v2/images')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(0, len(images))
self.stop_servers()
def test_download_random_access(self):
self.start_servers(**self.__dict__.copy())
        # Create an image with some deployer-defined properties
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json'})
data = jsonutils.dumps({'name': 'image-2', 'type': 'kernel',
'bar': 'foo', 'disk_format': 'aki',
'container_format': 'aki', 'xyz': 'abc'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image = jsonutils.loads(response.text)
image_id = image['id']
# Upload data to image
image_data = 'Z' * 15
path = self._url('/v2/images/%s/file' % image_id)
headers = self._headers({'Content-Type': 'application/octet-stream'})
response = requests.put(path, headers=headers, data=image_data)
self.assertEqual(204, response.status_code)
result_body = ''
for x in range(15):
# NOTE(flaper87): Read just 1 byte. Content-Range is
# 0-indexed and it specifies the first byte to read
# and the last byte to read.
content_range = 'bytes %s-%s/15' % (x, x)
headers = self._headers({'Content-Range': content_range})
path = self._url('/v2/images/%s/file' % image_id)
response = requests.get(path, headers=headers)
result_body += response.text
self.assertEqual(result_body, image_data)
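        # A wider range should behave the same way; e.g. a request with
        # 'Content-Range: bytes 0-4/15' would be expected to return the
        # first five bytes of the data.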
self.stop_servers()
def test_download_policy_when_cache_is_not_enabled(self):
rules = {'context_is_admin': 'role:admin',
'default': '',
'add_image': '',
'get_image': '',
'modify_image': '',
'upload_image': '',
'delete_image': '',
'download_image': '!'}
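        # NOTE: in this policy language an empty string permits everyone,
        # while '!' matches no one, so downloads should be rejected for
        # every caller regardless of role.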
self.set_policy_rules(rules)
self.start_servers(**self.__dict__.copy())
# Create an image
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'member'})
data = jsonutils.dumps({'name': 'image-1', 'disk_format': 'aki',
'container_format': 'aki'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Returned image entity
image = jsonutils.loads(response.text)
image_id = image['id']
expected_image = {
'status': 'queued',
'name': 'image-1',
'tags': [],
'visibility': 'private',
'self': '/v2/images/%s' % image_id,
'protected': False,
'file': '/v2/images/%s/file' % image_id,
'min_disk': 0,
'min_ram': 0,
'schema': '/v2/schemas/image',
}
for key, value in six.iteritems(expected_image):
self.assertEqual(value, image[key], key)
# Upload data to image
path = self._url('/v2/images/%s/file' % image_id)
headers = self._headers({'Content-Type': 'application/octet-stream'})
response = requests.put(path, headers=headers, data='ZZZZZ')
self.assertEqual(204, response.status_code)
        # Downloading the image should fail for any caller
path = self._url('/v2/images/%s/file' % image_id)
headers = self._headers({'Content-Type': 'application/octet-stream'})
response = requests.get(path, headers=headers)
self.assertEqual(403, response.status_code)
# Image Deletion should work
path = self._url('/v2/images/%s' % image_id)
response = requests.delete(path, headers=self._headers())
self.assertEqual(204, response.status_code)
        # This image should no longer be directly accessible
path = self._url('/v2/images/%s' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(404, response.status_code)
self.stop_servers()
def test_download_image_not_allowed_using_restricted_policy(self):
rules = {
"context_is_admin": "role:admin",
"default": "",
"add_image": "",
"get_image": "",
"modify_image": "",
"upload_image": "",
"delete_image": "",
"restricted":
"not ('aki':%(container_format)s and role:_member_)",
"download_image": "role:admin or rule:restricted"
}
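        # The 'restricted' rule denies '_member_' users downloading 'aki'
        # images; combined with 'download_image', only admins or callers
        # outside that combination should be allowed to download.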
self.set_policy_rules(rules)
self.start_servers(**self.__dict__.copy())
# Create an image
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'member'})
data = jsonutils.dumps({'name': 'image-1', 'disk_format': 'aki',
'container_format': 'aki'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Returned image entity
image = jsonutils.loads(response.text)
image_id = image['id']
expected_image = {
'status': 'queued',
'name': 'image-1',
'tags': [],
'visibility': 'private',
'self': '/v2/images/%s' % image_id,
'protected': False,
'file': '/v2/images/%s/file' % image_id,
'min_disk': 0,
'min_ram': 0,
'schema': '/v2/schemas/image',
}
for key, value in six.iteritems(expected_image):
self.assertEqual(value, image[key], key)
# Upload data to image
path = self._url('/v2/images/%s/file' % image_id)
headers = self._headers({'Content-Type': 'application/octet-stream'})
response = requests.put(path, headers=headers, data='ZZZZZ')
self.assertEqual(204, response.status_code)
        # Downloading the image as a '_member_' should fail
path = self._url('/v2/images/%s/file' % image_id)
headers = self._headers({'Content-Type': 'application/octet-stream',
'X-Roles': '_member_'})
response = requests.get(path, headers=headers)
self.assertEqual(403, response.status_code)
# Image Deletion should work
path = self._url('/v2/images/%s' % image_id)
response = requests.delete(path, headers=self._headers())
self.assertEqual(204, response.status_code)
        # This image should no longer be directly accessible
path = self._url('/v2/images/%s' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(404, response.status_code)
self.stop_servers()
def test_download_image_allowed_using_restricted_policy(self):
rules = {
"context_is_admin": "role:admin",
"default": "",
"add_image": "",
"get_image": "",
"modify_image": "",
"upload_image": "",
"get_image_location": "",
"delete_image": "",
"restricted":
"not ('aki':%(container_format)s and role:_member_)",
"download_image": "role:admin or rule:restricted"
}
self.set_policy_rules(rules)
self.start_servers(**self.__dict__.copy())
# Create an image
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'member'})
data = jsonutils.dumps({'name': 'image-1', 'disk_format': 'aki',
'container_format': 'aki'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Returned image entity
image = jsonutils.loads(response.text)
image_id = image['id']
expected_image = {
'status': 'queued',
'name': 'image-1',
'tags': [],
'visibility': 'private',
'self': '/v2/images/%s' % image_id,
'protected': False,
'file': '/v2/images/%s/file' % image_id,
'min_disk': 0,
'min_ram': 0,
'schema': '/v2/schemas/image',
}
for key, value in six.iteritems(expected_image):
            self.assertEqual(value, image[key], key)
# Upload data to image
path = self._url('/v2/images/%s/file' % image_id)
headers = self._headers({'Content-Type': 'application/octet-stream'})
response = requests.put(path, headers=headers, data='ZZZZZ')
self.assertEqual(204, response.status_code)
        # Downloading the image as a 'member' should be allowed
path = self._url('/v2/images/%s/file' % image_id)
headers = self._headers({'Content-Type': 'application/octet-stream',
'X-Roles': 'member'})
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
# Image Deletion should work
path = self._url('/v2/images/%s' % image_id)
response = requests.delete(path, headers=self._headers())
self.assertEqual(204, response.status_code)
        # This image should no longer be directly accessible
path = self._url('/v2/images/%s' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(404, response.status_code)
self.stop_servers()
def test_download_image_raises_service_unavailable(self):
"""Test image download returns HTTPServiceUnavailable."""
self.start_servers(**self.__dict__.copy())
# Create an image
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json'})
data = jsonutils.dumps({'name': 'image-1',
'disk_format': 'aki',
'container_format': 'aki'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Get image id
image = jsonutils.loads(response.text)
image_id = image['id']
# Update image locations via PATCH
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type})
http_server_pid, http_port = test_utils.start_http_server(image_id,
"image-1")
values = [{'url': 'http://127.0.0.1:%s/image-1' % http_port,
'metadata': {'idx': '0'}}]
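        # Pointing the image's location at this throwaway http server lets
        # the test simulate a backend outage later by killing the server.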
doc = [{'op': 'replace',
'path': '/locations',
'value': values}]
data = jsonutils.dumps(doc)
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code)
        # Downloading the image should work
path = self._url('/v2/images/%s/file' % image_id)
headers = self._headers({'Content-Type': 'application/json'})
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
        # Stop the http server backing the image location
os.kill(http_server_pid, signal.SIGKILL)
        # Downloading the image should now raise HTTPServiceUnavailable
path = self._url('/v2/images/%s/file' % image_id)
headers = self._headers({'Content-Type': 'application/json'})
response = requests.get(path, headers=headers)
self.assertEqual(503, response.status_code)
# Image Deletion should work
path = self._url('/v2/images/%s' % image_id)
response = requests.delete(path, headers=self._headers())
self.assertEqual(204, response.status_code)
        # This image should no longer be directly accessible
path = self._url('/v2/images/%s' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(404, response.status_code)
self.stop_servers()
def test_image_modification_works_for_owning_tenant_id(self):
rules = {
"context_is_admin": "role:admin",
"default": "",
"add_image": "",
"get_image": "",
"modify_image": "tenant:%(owner)s",
"upload_image": "",
"get_image_location": "",
"delete_image": "",
"restricted":
"not ('aki':%(container_format)s and role:_member_)",
"download_image": "role:admin or rule:restricted"
}
self.set_policy_rules(rules)
self.start_servers(**self.__dict__.copy())
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin'})
data = jsonutils.dumps({'name': 'image-1', 'disk_format': 'aki',
'container_format': 'aki'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Get the image's ID
image = jsonutils.loads(response.text)
image_id = image['id']
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers['content-type'] = media_type
del headers['X-Roles']
data = jsonutils.dumps([
{'op': 'replace', 'path': '/name', 'value': 'new-name'},
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code)
self.stop_servers()
def test_image_modification_fails_on_mismatched_tenant_ids(self):
rules = {
"context_is_admin": "role:admin",
"default": "",
"add_image": "",
"get_image": "",
"modify_image": "'A-Fake-Tenant-Id':%(owner)s",
"upload_image": "",
"get_image_location": "",
"delete_image": "",
"restricted":
"not ('aki':%(container_format)s and role:_member_)",
"download_image": "role:admin or rule:restricted"
}
self.set_policy_rules(rules)
self.start_servers(**self.__dict__.copy())
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin'})
data = jsonutils.dumps({'name': 'image-1', 'disk_format': 'aki',
'container_format': 'aki'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Get the image's ID
image = jsonutils.loads(response.text)
image_id = image['id']
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers['content-type'] = media_type
del headers['X-Roles']
data = jsonutils.dumps([
{'op': 'replace', 'path': '/name', 'value': 'new-name'},
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(403, response.status_code)
self.stop_servers()
def test_member_additions_works_for_owning_tenant_id(self):
rules = {
"context_is_admin": "role:admin",
"default": "",
"add_image": "",
"get_image": "",
"modify_image": "",
"upload_image": "",
"get_image_location": "",
"delete_image": "",
"restricted":
"not ('aki':%(container_format)s and role:_member_)",
"download_image": "role:admin or rule:restricted",
"add_member": "tenant:%(owner)s",
}
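        # 'tenant:%(owner)s' passes only when the requesting tenant matches
        # the image owner, so member additions should be limited to the
        # owning tenant.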
self.set_policy_rules(rules)
self.start_servers(**self.__dict__.copy())
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin'})
data = jsonutils.dumps({'name': 'image-1', 'disk_format': 'aki',
'container_format': 'aki'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Get the image's ID
image = jsonutils.loads(response.text)
image_id = image['id']
        # Add TENANT3 as a member, as the owning (non-admin) tenant
path = self._url('/v2/images/%s/members' % image_id)
body = jsonutils.dumps({'member': TENANT3})
del headers['X-Roles']
response = requests.post(path, headers=headers, data=body)
self.assertEqual(200, response.status_code)
self.stop_servers()
def test_image_additions_works_only_for_specific_tenant_id(self):
rules = {
"context_is_admin": "role:admin",
"default": "",
"add_image": "'{0}':%(owner)s".format(TENANT1),
"get_image": "",
"modify_image": "",
"upload_image": "",
"get_image_location": "",
"delete_image": "",
"restricted":
"not ('aki':%(container_format)s and role:_member_)",
"download_image": "role:admin or rule:restricted",
"add_member": "",
}
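        # The add_image rule compares the literal TENANT1 id against
        # %(owner)s, so image creation should succeed only for TENANT1.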
self.set_policy_rules(rules)
self.start_servers(**self.__dict__.copy())
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin', 'X-Tenant-Id': TENANT1})
data = jsonutils.dumps({'name': 'image-1', 'disk_format': 'aki',
'container_format': 'aki'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
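        # The same request from a different tenant should be rejected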
headers['X-Tenant-Id'] = TENANT2
response = requests.post(path, headers=headers, data=data)
self.assertEqual(403, response.status_code)
self.stop_servers()
def test_owning_tenant_id_can_retrieve_image_information(self):
rules = {
"context_is_admin": "role:admin",
"default": "",
"add_image": "",
"get_image": "tenant:%(owner)s",
"modify_image": "",
"upload_image": "",
"get_image_location": "",
"delete_image": "",
"restricted":
"not ('aki':%(container_format)s and role:_member_)",
"download_image": "role:admin or rule:restricted",
"add_member": "",
}
self.set_policy_rules(rules)
self.start_servers(**self.__dict__.copy())
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin', 'X-Tenant-Id': TENANT1})
data = jsonutils.dumps({'name': 'image-1', 'disk_format': 'aki',
'container_format': 'aki'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Remove the admin role
del headers['X-Roles']
# Get the image's ID
image = jsonutils.loads(response.text)
image_id = image['id']
# Can retrieve the image as TENANT1
path = self._url('/v2/images/%s' % image_id)
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
# Can retrieve the image's members as TENANT1
path = self._url('/v2/images/%s/members' % image_id)
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
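        # A different tenant should be denied the members listing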
headers['X-Tenant-Id'] = TENANT2
response = requests.get(path, headers=headers)
self.assertEqual(403, response.status_code)
self.stop_servers()
def test_owning_tenant_can_publicize_image(self):
rules = {
"context_is_admin": "role:admin",
"default": "",
"add_image": "",
"publicize_image": "tenant:%(owner)s",
"get_image": "tenant:%(owner)s",
"modify_image": "",
"upload_image": "",
"get_image_location": "",
"delete_image": "",
"restricted":
"not ('aki':%(container_format)s and role:_member_)",
"download_image": "role:admin or rule:restricted",
"add_member": "",
}
self.set_policy_rules(rules)
self.start_servers(**self.__dict__.copy())
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin', 'X-Tenant-Id': TENANT1})
data = jsonutils.dumps({'name': 'image-1', 'disk_format': 'aki',
'container_format': 'aki'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Get the image's ID
image = jsonutils.loads(response.text)
image_id = image['id']
path = self._url('/v2/images/%s' % image_id)
headers = self._headers({
'Content-Type': 'application/openstack-images-v2.1-json-patch',
'X-Tenant-Id': TENANT1,
})
doc = [{'op': 'replace', 'path': '/visibility', 'value': 'public'}]
data = jsonutils.dumps(doc)
response = requests.patch(path, headers=headers, data=data)
        self.assertEqual(200, response.status_code)
        self.stop_servers()
def test_owning_tenant_can_delete_image(self):
rules = {
"context_is_admin": "role:admin",
"default": "",
"add_image": "",
"publicize_image": "tenant:%(owner)s",
"get_image": "tenant:%(owner)s",
"modify_image": "",
"upload_image": "",
"get_image_location": "",
"delete_image": "",
"restricted":
"not ('aki':%(container_format)s and role:_member_)",
"download_image": "role:admin or rule:restricted",
"add_member": "",
}
self.set_policy_rules(rules)
self.start_servers(**self.__dict__.copy())
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin', 'X-Tenant-Id': TENANT1})
data = jsonutils.dumps({'name': 'image-1', 'disk_format': 'aki',
'container_format': 'aki'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Get the image's ID
image = jsonutils.loads(response.text)
image_id = image['id']
path = self._url('/v2/images/%s' % image_id)
response = requests.delete(path, headers=headers)
        self.assertEqual(204, response.status_code)
        self.stop_servers()
def test_image_size_cap(self):
self.api_server.image_size_cap = 128
self.start_servers(**self.__dict__.copy())
        # Create an image
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json'})
data = jsonutils.dumps({'name': 'image-size-cap-test-image',
'type': 'kernel', 'disk_format': 'aki',
'container_format': 'aki'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image = jsonutils.loads(response.text)
image_id = image['id']
        # Try to populate it with oversized data
path = self._url('/v2/images/%s/file' % image_id)
headers = self._headers({'Content-Type': 'application/octet-stream'})
class StreamSim(object):
# Using a one-shot iterator to force chunked transfer in the PUT
# request
def __init__(self, size):
self.size = size
def __iter__(self):
yield 'Z' * self.size
response = requests.put(path, headers=headers, data=StreamSim(
self.api_server.image_size_cap + 1))
self.assertEqual(413, response.status_code)
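        # NOTE: because the body is chunked there is no Content-Length to
        # reject up front; the server presumably has to cut the stream off
        # once image_size_cap is exceeded, hence the 413.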
# hashlib.md5('Z'*129).hexdigest()
# == '76522d28cb4418f12704dfa7acd6e7ee'
# If the image has this checksum, it means that the whole stream was
# accepted and written to the store, which should not be the case.
path = self._url('/v2/images/{0}'.format(image_id))
headers = self._headers({'content-type': 'application/json'})
response = requests.get(path, headers=headers)
image_checksum = jsonutils.loads(response.text).get('checksum')
        self.assertNotEqual(image_checksum, '76522d28cb4418f12704dfa7acd6e7ee')
        self.stop_servers()
def test_permissions(self):
self.start_servers(**self.__dict__.copy())
# Create an image that belongs to TENANT1
path = self._url('/v2/images')
headers = self._headers({'Content-Type': 'application/json'})
data = jsonutils.dumps({'name': 'image-1', 'disk_format': 'raw',
'container_format': 'bare'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image_id = jsonutils.loads(response.text)['id']
# Upload some image data
path = self._url('/v2/images/%s/file' % image_id)
headers = self._headers({'Content-Type': 'application/octet-stream'})
response = requests.put(path, headers=headers, data='ZZZZZ')
self.assertEqual(204, response.status_code)
# TENANT1 should see the image in their list
path = self._url('/v2/images')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(image_id, images[0]['id'])
# TENANT1 should be able to access the image directly
path = self._url('/v2/images/%s' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
# TENANT2 should not see the image in their list
path = self._url('/v2/images')
headers = self._headers({'X-Tenant-Id': TENANT2})
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(0, len(images))
# TENANT2 should not be able to access the image directly
path = self._url('/v2/images/%s' % image_id)
headers = self._headers({'X-Tenant-Id': TENANT2})
response = requests.get(path, headers=headers)
self.assertEqual(404, response.status_code)
# TENANT2 should not be able to modify the image, either
path = self._url('/v2/images/%s' % image_id)
headers = self._headers({
'Content-Type': 'application/openstack-images-v2.1-json-patch',
'X-Tenant-Id': TENANT2,
})
doc = [{'op': 'replace', 'path': '/name', 'value': 'image-2'}]
data = jsonutils.dumps(doc)
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(404, response.status_code)
# TENANT2 should not be able to delete the image, either
path = self._url('/v2/images/%s' % image_id)
headers = self._headers({'X-Tenant-Id': TENANT2})
response = requests.delete(path, headers=headers)
self.assertEqual(404, response.status_code)
# Publicize the image as an admin of TENANT1
path = self._url('/v2/images/%s' % image_id)
headers = self._headers({
'Content-Type': 'application/openstack-images-v2.1-json-patch',
'X-Roles': 'admin',
})
doc = [{'op': 'replace', 'path': '/visibility', 'value': 'public'}]
data = jsonutils.dumps(doc)
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code)
# TENANT3 should now see the image in their list
path = self._url('/v2/images')
headers = self._headers({'X-Tenant-Id': TENANT3})
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(image_id, images[0]['id'])
# TENANT3 should also be able to access the image directly
path = self._url('/v2/images/%s' % image_id)
headers = self._headers({'X-Tenant-Id': TENANT3})
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
# TENANT3 still should not be able to modify the image
path = self._url('/v2/images/%s' % image_id)
headers = self._headers({
'Content-Type': 'application/openstack-images-v2.1-json-patch',
'X-Tenant-Id': TENANT3,
})
doc = [{'op': 'replace', 'path': '/name', 'value': 'image-2'}]
data = jsonutils.dumps(doc)
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(403, response.status_code)
# TENANT3 should not be able to delete the image, either
path = self._url('/v2/images/%s' % image_id)
headers = self._headers({'X-Tenant-Id': TENANT3})
response = requests.delete(path, headers=headers)
self.assertEqual(403, response.status_code)
# Image data should still be present after the failed delete
path = self._url('/v2/images/%s/file' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
        self.assertEqual('ZZZZZ', response.text)
self.stop_servers()
def test_property_protections_with_roles(self):
# Enable property protection
self.api_server.property_protection_file = self.property_file_roles
self.start_servers(**self.__dict__.copy())
# Image list should be empty
path = self._url('/v2/images')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(0, len(images))
# Create an image for role member with extra props
# Raises 403 since user is not allowed to set 'foo'
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'member'})
data = jsonutils.dumps({'name': 'image-1', 'foo': 'bar',
'disk_format': 'aki',
'container_format': 'aki',
'x_owner_foo': 'o_s_bar'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(403, response.status_code)
# Create an image for role member without 'foo'
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'member'})
data = jsonutils.dumps({'name': 'image-1', 'disk_format': 'aki',
'container_format': 'aki',
'x_owner_foo': 'o_s_bar'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Returned image entity should have 'x_owner_foo'
image = jsonutils.loads(response.text)
image_id = image['id']
expected_image = {
'status': 'queued',
'name': 'image-1',
'tags': [],
'visibility': 'private',
'self': '/v2/images/%s' % image_id,
'protected': False,
'file': '/v2/images/%s/file' % image_id,
'min_disk': 0,
'x_owner_foo': 'o_s_bar',
'min_ram': 0,
'schema': '/v2/schemas/image',
}
for key, value in expected_image.items():
self.assertEqual(value, image[key], key)
# Create an image for role spl_role with extra props
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'spl_role'})
data = jsonutils.dumps({'name': 'image-1',
'disk_format': 'aki',
'container_format': 'aki',
'spl_create_prop': 'create_bar',
'spl_create_prop_policy': 'create_policy_bar',
'spl_read_prop': 'read_bar',
'spl_update_prop': 'update_bar',
'spl_delete_prop': 'delete_bar',
'spl_delete_empty_prop': ''})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image = jsonutils.loads(response.text)
image_id = image['id']
        # Attempt to replace properties which are forbidden
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'spl_role'})
data = jsonutils.dumps([
{'op': 'replace', 'path': '/spl_read_prop', 'value': 'r'},
{'op': 'replace', 'path': '/spl_update_prop', 'value': 'u'},
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(403, response.status_code, response.text)
        # Attempt to add and remove properties which are forbidden
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'spl_role'})
data = jsonutils.dumps([
{'op': 'add', 'path': '/spl_new_prop', 'value': 'new'},
{'op': 'remove', 'path': '/spl_create_prop'},
{'op': 'remove', 'path': '/spl_delete_prop'},
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(403, response.status_code, response.text)
# Attempt to replace properties
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'spl_role'})
data = jsonutils.dumps([
# Updating an empty property to verify bug #1332103.
{'op': 'replace', 'path': '/spl_update_prop', 'value': ''},
{'op': 'replace', 'path': '/spl_update_prop', 'value': 'u'},
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code, response.text)
# Returned image entity should reflect the changes
image = jsonutils.loads(response.text)
        # 'spl_update_prop' has update permission for spl_role,
        # hence the value has changed
self.assertEqual('u', image['spl_update_prop'])
# Attempt to remove properties
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'spl_role'})
data = jsonutils.dumps([
{'op': 'remove', 'path': '/spl_delete_prop'},
# Deleting an empty property to verify bug #1332103.
{'op': 'remove', 'path': '/spl_delete_empty_prop'},
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code, response.text)
# Returned image entity should reflect the changes
image = jsonutils.loads(response.text)
        # 'spl_delete_prop' and 'spl_delete_empty_prop' have delete
        # permission for spl_role, hence the properties have been deleted
self.assertNotIn('spl_delete_prop', image.keys())
self.assertNotIn('spl_delete_empty_prop', image.keys())
# Image Deletion should work
path = self._url('/v2/images/%s' % image_id)
response = requests.delete(path, headers=self._headers())
self.assertEqual(204, response.status_code)
        # This image should no longer be directly accessible
path = self._url('/v2/images/%s' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(404, response.status_code)
self.stop_servers()
def test_property_protections_with_policies(self):
# Enable property protection
self.api_server.property_protection_file = self.property_file_policies
self.api_server.property_protection_rule_format = 'policies'
self.start_servers(**self.__dict__.copy())
# Image list should be empty
path = self._url('/v2/images')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(0, len(images))
# Create an image for role member with extra props
# Raises 403 since user is not allowed to set 'foo'
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'member'})
data = jsonutils.dumps({'name': 'image-1', 'foo': 'bar',
'disk_format': 'aki',
'container_format': 'aki',
'x_owner_foo': 'o_s_bar'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(403, response.status_code)
# Create an image for role member without 'foo'
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'member'})
data = jsonutils.dumps({'name': 'image-1', 'disk_format': 'aki',
'container_format': 'aki'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Returned image entity
image = jsonutils.loads(response.text)
image_id = image['id']
expected_image = {
'status': 'queued',
'name': 'image-1',
'tags': [],
'visibility': 'private',
'self': '/v2/images/%s' % image_id,
'protected': False,
'file': '/v2/images/%s/file' % image_id,
'min_disk': 0,
'min_ram': 0,
'schema': '/v2/schemas/image',
}
for key, value in expected_image.items():
self.assertEqual(value, image[key], key)
# Create an image for role spl_role with extra props
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'spl_role, admin'})
data = jsonutils.dumps({'name': 'image-1',
'disk_format': 'aki',
'container_format': 'aki',
'spl_creator_policy': 'creator_bar',
'spl_default_policy': 'default_bar'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image = jsonutils.loads(response.text)
image_id = image['id']
self.assertEqual('creator_bar', image['spl_creator_policy'])
self.assertEqual('default_bar', image['spl_default_policy'])
# Attempt to replace a property which is permitted
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'admin'})
data = jsonutils.dumps([
# Updating an empty property to verify bug #1332103.
{'op': 'replace', 'path': '/spl_creator_policy', 'value': ''},
{'op': 'replace', 'path': '/spl_creator_policy', 'value': 'r'},
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code, response.text)
# Returned image entity should reflect the changes
image = jsonutils.loads(response.text)
        # 'spl_creator_policy' has update permission for admin,
        # hence the value has changed
self.assertEqual('r', image['spl_creator_policy'])
# Attempt to replace a property which is forbidden
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'spl_role'})
data = jsonutils.dumps([
{'op': 'replace', 'path': '/spl_creator_policy', 'value': 'z'},
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(403, response.status_code, response.text)
# Attempt to read properties
path = self._url('/v2/images/%s' % image_id)
headers = self._headers({'content-type': media_type,
'X-Roles': 'random_role'})
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
        # 'random_role' is allowed to read 'spl_default_policy'.
self.assertEqual(image['spl_default_policy'], 'default_bar')
# 'random_role' is forbidden to read 'spl_creator_policy'.
self.assertNotIn('spl_creator_policy', image)
# Attempt to replace and remove properties which are permitted
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'admin'})
data = jsonutils.dumps([
# Deleting an empty property to verify bug #1332103.
{'op': 'replace', 'path': '/spl_creator_policy', 'value': ''},
{'op': 'remove', 'path': '/spl_creator_policy'},
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code, response.text)
# Returned image entity should reflect the changes
image = jsonutils.loads(response.text)
        # 'spl_creator_policy' has delete permission for admin,
        # hence the property has been deleted
self.assertNotIn('spl_creator_policy', image)
# Attempt to read a property that is permitted
path = self._url('/v2/images/%s' % image_id)
headers = self._headers({'content-type': media_type,
'X-Roles': 'random_role'})
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
# Returned image entity should reflect the changes
image = jsonutils.loads(response.text)
self.assertEqual(image['spl_default_policy'], 'default_bar')
# Image Deletion should work
path = self._url('/v2/images/%s' % image_id)
response = requests.delete(path, headers=self._headers())
self.assertEqual(204, response.status_code)
        # This image should no longer be directly accessible
path = self._url('/v2/images/%s' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(404, response.status_code)
self.stop_servers()
def test_property_protections_special_chars_roles(self):
# Enable property protection
self.api_server.property_protection_file = self.property_file_roles
self.start_servers(**self.__dict__.copy())
# Verify both admin and unknown role can create properties marked with
# '@'
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin'})
data = jsonutils.dumps({
'name': 'image-1',
'disk_format': 'aki',
'container_format': 'aki',
'x_all_permitted_admin': '1'
})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image = jsonutils.loads(response.text)
image_id = image['id']
expected_image = {
'status': 'queued',
'name': 'image-1',
'tags': [],
'visibility': 'private',
'self': '/v2/images/%s' % image_id,
'protected': False,
'file': '/v2/images/%s/file' % image_id,
'min_disk': 0,
'x_all_permitted_admin': '1',
'min_ram': 0,
'schema': '/v2/schemas/image',
}
for key, value in expected_image.items():
self.assertEqual(value, image[key], key)
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'joe_soap'})
data = jsonutils.dumps({
'name': 'image-1',
'disk_format': 'aki',
'container_format': 'aki',
'x_all_permitted_joe_soap': '1'
})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image = jsonutils.loads(response.text)
image_id = image['id']
expected_image = {
'status': 'queued',
'name': 'image-1',
'tags': [],
'visibility': 'private',
'self': '/v2/images/%s' % image_id,
'protected': False,
'file': '/v2/images/%s/file' % image_id,
'min_disk': 0,
'x_all_permitted_joe_soap': '1',
'min_ram': 0,
'schema': '/v2/schemas/image',
}
for key, value in expected_image.items():
self.assertEqual(value, image[key], key)
# Verify both admin and unknown role can read properties marked with
# '@'
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin'})
path = self._url('/v2/images/%s' % image_id)
        response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertEqual('1', image['x_all_permitted_joe_soap'])
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'joe_soap'})
path = self._url('/v2/images/%s' % image_id)
        response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertEqual('1', image['x_all_permitted_joe_soap'])
# Verify both admin and unknown role can update properties marked with
# '@'
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'admin'})
data = jsonutils.dumps([
{'op': 'replace',
'path': '/x_all_permitted_joe_soap', 'value': '2'}
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code, response.text)
image = jsonutils.loads(response.text)
self.assertEqual('2', image['x_all_permitted_joe_soap'])
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'joe_soap'})
data = jsonutils.dumps([
{'op': 'replace',
'path': '/x_all_permitted_joe_soap', 'value': '3'}
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code, response.text)
image = jsonutils.loads(response.text)
self.assertEqual('3', image['x_all_permitted_joe_soap'])
# Verify both admin and unknown role can delete properties marked with
# '@'
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin'})
data = jsonutils.dumps({
'name': 'image-1',
'disk_format': 'aki',
'container_format': 'aki',
'x_all_permitted_a': '1',
'x_all_permitted_b': '2'
})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image = jsonutils.loads(response.text)
image_id = image['id']
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'admin'})
data = jsonutils.dumps([
{'op': 'remove', 'path': '/x_all_permitted_a'}
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code, response.text)
image = jsonutils.loads(response.text)
self.assertNotIn('x_all_permitted_a', image.keys())
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'joe_soap'})
data = jsonutils.dumps([
{'op': 'remove', 'path': '/x_all_permitted_b'}
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code, response.text)
image = jsonutils.loads(response.text)
self.assertNotIn('x_all_permitted_b', image.keys())
# Verify neither admin nor unknown role can create a property protected
# with '!'
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin'})
data = jsonutils.dumps({
'name': 'image-1',
'disk_format': 'aki',
'container_format': 'aki',
'x_none_permitted_admin': '1'
})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(403, response.status_code)
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'joe_soap'})
data = jsonutils.dumps({
'name': 'image-1',
'disk_format': 'aki',
'container_format': 'aki',
'x_none_permitted_joe_soap': '1'
})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(403, response.status_code)
# Verify neither admin nor unknown role can read properties marked with
# '!'
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin'})
data = jsonutils.dumps({
'name': 'image-1',
'disk_format': 'aki',
'container_format': 'aki',
'x_none_read': '1'
})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image = jsonutils.loads(response.text)
image_id = image['id']
self.assertNotIn('x_none_read', image.keys())
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin'})
path = self._url('/v2/images/%s' % image_id)
        response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertNotIn('x_none_read', image.keys())
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'joe_soap'})
path = self._url('/v2/images/%s' % image_id)
        response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertNotIn('x_none_read', image.keys())
# Verify neither admin nor unknown role can update properties marked
# with '!'
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin'})
data = jsonutils.dumps({
'name': 'image-1',
'disk_format': 'aki',
'container_format': 'aki',
'x_none_update': '1'
})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image = jsonutils.loads(response.text)
image_id = image['id']
self.assertEqual('1', image['x_none_update'])
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'admin'})
data = jsonutils.dumps([
{'op': 'replace',
'path': '/x_none_update', 'value': '2'}
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(403, response.status_code, response.text)
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'joe_soap'})
data = jsonutils.dumps([
{'op': 'replace',
'path': '/x_none_update', 'value': '3'}
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(409, response.status_code, response.text)
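        # NOTE: a 409 (rather than 403) is expected here because a role
        # without read access presumably cannot see the property at all,
        # making the replace look like an update to a nonexistent attribute.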
# Verify neither admin nor unknown role can delete properties marked
# with '!'
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin'})
data = jsonutils.dumps({
'name': 'image-1',
'disk_format': 'aki',
'container_format': 'aki',
'x_none_delete': '1',
})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image = jsonutils.loads(response.text)
image_id = image['id']
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'admin'})
data = jsonutils.dumps([
{'op': 'remove', 'path': '/x_none_delete'}
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(403, response.status_code, response.text)
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'joe_soap'})
data = jsonutils.dumps([
{'op': 'remove', 'path': '/x_none_delete'}
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(409, response.status_code, response.text)
self.stop_servers()
def test_property_protections_special_chars_policies(self):
# Enable property protection
self.api_server.property_protection_file = self.property_file_policies
self.api_server.property_protection_rule_format = 'policies'
self.start_servers(**self.__dict__.copy())
# Verify both admin and unknown role can create properties marked with
# '@'
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin'})
data = jsonutils.dumps({
'name': 'image-1',
'disk_format': 'aki',
'container_format': 'aki',
'x_all_permitted_admin': '1'
})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image = jsonutils.loads(response.text)
image_id = image['id']
expected_image = {
'status': 'queued',
'name': 'image-1',
'tags': [],
'visibility': 'private',
'self': '/v2/images/%s' % image_id,
'protected': False,
'file': '/v2/images/%s/file' % image_id,
'min_disk': 0,
'x_all_permitted_admin': '1',
'min_ram': 0,
'schema': '/v2/schemas/image',
}
for key, value in expected_image.items():
self.assertEqual(value, image[key], key)
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'joe_soap'})
data = jsonutils.dumps({
'name': 'image-1',
'disk_format': 'aki',
'container_format': 'aki',
'x_all_permitted_joe_soap': '1'
})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image = jsonutils.loads(response.text)
image_id = image['id']
expected_image = {
'status': 'queued',
'name': 'image-1',
'tags': [],
'visibility': 'private',
'self': '/v2/images/%s' % image_id,
'protected': False,
'file': '/v2/images/%s/file' % image_id,
'min_disk': 0,
'x_all_permitted_joe_soap': '1',
'min_ram': 0,
'schema': '/v2/schemas/image',
}
for key, value in expected_image.items():
self.assertEqual(value, image[key], key)
# Verify both admin and unknown role can read properties marked with
# '@'
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin'})
path = self._url('/v2/images/%s' % image_id)
        response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertEqual('1', image['x_all_permitted_joe_soap'])
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'joe_soap'})
path = self._url('/v2/images/%s' % image_id)
        response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertEqual('1', image['x_all_permitted_joe_soap'])
# Verify both admin and unknown role can update properties marked with
# '@'
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'admin'})
data = jsonutils.dumps([
{'op': 'replace',
'path': '/x_all_permitted_joe_soap', 'value': '2'}
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code, response.text)
image = jsonutils.loads(response.text)
self.assertEqual('2', image['x_all_permitted_joe_soap'])
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'joe_soap'})
data = jsonutils.dumps([
{'op': 'replace',
'path': '/x_all_permitted_joe_soap', 'value': '3'}
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code, response.text)
image = jsonutils.loads(response.text)
self.assertEqual('3', image['x_all_permitted_joe_soap'])
# Verify both admin and unknown role can delete properties marked with
# '@'
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin'})
data = jsonutils.dumps({
'name': 'image-1',
'disk_format': 'aki',
'container_format': 'aki',
'x_all_permitted_a': '1',
'x_all_permitted_b': '2'
})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image = jsonutils.loads(response.text)
image_id = image['id']
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'admin'})
data = jsonutils.dumps([
{'op': 'remove', 'path': '/x_all_permitted_a'}
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code, response.text)
image = jsonutils.loads(response.text)
self.assertNotIn('x_all_permitted_a', image.keys())
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'joe_soap'})
data = jsonutils.dumps([
{'op': 'remove', 'path': '/x_all_permitted_b'}
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code, response.text)
image = jsonutils.loads(response.text)
self.assertNotIn('x_all_permitted_b', image.keys())
# Verify neither admin nor unknown role can create a property protected
# with '!'
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin'})
data = jsonutils.dumps({
'name': 'image-1',
'disk_format': 'aki',
'container_format': 'aki',
'x_none_permitted_admin': '1'
})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(403, response.status_code)
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'joe_soap'})
data = jsonutils.dumps({
'name': 'image-1',
'disk_format': 'aki',
'container_format': 'aki',
'x_none_permitted_joe_soap': '1'
})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(403, response.status_code)
# Verify neither admin nor unknown role can read properties marked with
# '!'
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin'})
data = jsonutils.dumps({
'name': 'image-1',
'disk_format': 'aki',
'container_format': 'aki',
'x_none_read': '1'
})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image = jsonutils.loads(response.text)
image_id = image['id']
self.assertNotIn('x_none_read', image.keys())
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin'})
path = self._url('/v2/images/%s' % image_id)
        response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertNotIn('x_none_read', image.keys())
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'joe_soap'})
path = self._url('/v2/images/%s' % image_id)
        response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertNotIn('x_none_read', image.keys())
# Verify neither admin nor unknown role can update properties marked
# with '!'
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin'})
data = jsonutils.dumps({
'name': 'image-1',
'disk_format': 'aki',
'container_format': 'aki',
'x_none_update': '1'
})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image = jsonutils.loads(response.text)
image_id = image['id']
self.assertEqual('1', image['x_none_update'])
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'admin'})
data = jsonutils.dumps([
{'op': 'replace',
'path': '/x_none_update', 'value': '2'}
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(403, response.status_code, response.text)
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'joe_soap'})
data = jsonutils.dumps([
{'op': 'replace',
'path': '/x_none_update', 'value': '3'}
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(409, response.status_code, response.text)
# Verify neither admin nor unknown role can delete properties marked
# with '!'
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json',
'X-Roles': 'admin'})
data = jsonutils.dumps({
'name': 'image-1',
'disk_format': 'aki',
'container_format': 'aki',
'x_none_delete': '1',
})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image = jsonutils.loads(response.text)
image_id = image['id']
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'admin'})
data = jsonutils.dumps([
{'op': 'remove', 'path': '/x_none_delete'}
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(403, response.status_code, response.text)
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type,
'X-Roles': 'joe_soap'})
data = jsonutils.dumps([
{'op': 'remove', 'path': '/x_none_delete'}
])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(409, response.status_code, response.text)
self.stop_servers()
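    # For reference, property protections of the kind exercised above are
    # typically expressed in a property-protections config file, where '!'
    # denies an operation to every role. A hypothetical sketch (section names
    # and role choices are illustrative, not the exact config of this test):
    #
    #     [x_none_permitted.*]
    #     create = !
    #     read = !
    #     update = !
    #     delete = !
    #
    #     [x_none_read]
    #     create = admin
    #     read = !
    #     update = !
    #     delete = !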
def test_tag_lifecycle(self):
self.start_servers(**self.__dict__.copy())
# Create an image with a tag - duplicate should be ignored
path = self._url('/v2/images')
headers = self._headers({'Content-Type': 'application/json'})
data = jsonutils.dumps({'name': 'image-1', 'tags': ['sniff', 'sniff']})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image_id = jsonutils.loads(response.text)['id']
# Image should show a list with a single tag
path = self._url('/v2/images/%s' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
tags = jsonutils.loads(response.text)['tags']
self.assertEqual(['sniff'], tags)
# Delete all tags
for tag in tags:
path = self._url('/v2/images/%s/tags/%s' % (image_id, tag))
response = requests.delete(path, headers=self._headers())
self.assertEqual(204, response.status_code)
# Update image with too many tags via PUT
# Configured limit is 10 tags
for i in range(10):
path = self._url('/v2/images/%s/tags/foo%i' % (image_id, i))
response = requests.put(path, headers=self._headers())
self.assertEqual(204, response.status_code)
# 11th tag should fail
path = self._url('/v2/images/%s/tags/fail_me' % image_id)
response = requests.put(path, headers=self._headers())
self.assertEqual(413, response.status_code)
# Make sure the 11th tag was not added
path = self._url('/v2/images/%s' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
tags = jsonutils.loads(response.text)['tags']
self.assertEqual(10, len(tags))
# Update image tags via PATCH
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type})
doc = [
{
'op': 'replace',
'path': '/tags',
'value': ['foo'],
},
]
data = jsonutils.dumps(doc)
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code)
# Update image with too many tags via PATCH
# Configured limit is 10 tags
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type})
tags = ['foo%d' % i for i in range(11)]
doc = [
{
'op': 'replace',
'path': '/tags',
'value': tags,
},
]
data = jsonutils.dumps(doc)
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(413, response.status_code)
# Tags should not have changed since request was over limit
path = self._url('/v2/images/%s' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
tags = jsonutils.loads(response.text)['tags']
self.assertEqual(['foo'], tags)
# Update image with duplicate tag - it should be ignored
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type})
doc = [
{
'op': 'replace',
'path': '/tags',
'value': ['sniff', 'snozz', 'snozz'],
},
]
data = jsonutils.dumps(doc)
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code)
tags = jsonutils.loads(response.text)['tags']
self.assertEqual(['sniff', 'snozz'], sorted(tags))
# Image should show the appropriate tags
path = self._url('/v2/images/%s' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
tags = jsonutils.loads(response.text)['tags']
self.assertEqual(['sniff', 'snozz'], sorted(tags))
# Attempt to tag the image with a duplicate should be ignored
path = self._url('/v2/images/%s/tags/snozz' % image_id)
response = requests.put(path, headers=self._headers())
self.assertEqual(204, response.status_code)
# Create another more complex tag
path = self._url('/v2/images/%s/tags/gabe%%40example.com' % image_id)
response = requests.put(path, headers=self._headers())
self.assertEqual(204, response.status_code)
# Double-check that the tags container on the image is populated
path = self._url('/v2/images/%s' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
tags = jsonutils.loads(response.text)['tags']
self.assertEqual(['gabe@example.com', 'sniff', 'snozz'],
sorted(tags))
# Query images by single tag
path = self._url('/v2/images?tag=sniff')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(1, len(images))
self.assertEqual('image-1', images[0]['name'])
# Query images by multiple tags
path = self._url('/v2/images?tag=sniff&tag=snozz')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(1, len(images))
self.assertEqual('image-1', images[0]['name'])
# Query images by tag and other attributes
path = self._url('/v2/images?tag=sniff&status=queued')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(1, len(images))
self.assertEqual('image-1', images[0]['name'])
# Query images by tag and a nonexistent tag
path = self._url('/v2/images?tag=sniff&tag=fake')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(0, len(images))
# The tag should be deletable
path = self._url('/v2/images/%s/tags/gabe%%40example.com' % image_id)
response = requests.delete(path, headers=self._headers())
self.assertEqual(204, response.status_code)
# List of tags should reflect the deletion
path = self._url('/v2/images/%s' % image_id)
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
tags = jsonutils.loads(response.text)['tags']
self.assertEqual(['sniff', 'snozz'], sorted(tags))
# Deleting the same tag should return a 404
path = self._url('/v2/images/%s/tags/gabe%%40example.com' % image_id)
response = requests.delete(path, headers=self._headers())
self.assertEqual(404, response.status_code)
        # Querying by the deleted tag should return no images
path = self._url('/v2/images?tag=gabe%%40example.com')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(0, len(images))
self.stop_servers()
def test_images_container(self):
# Image list should be empty and no next link should be present
self.start_servers(**self.__dict__.copy())
path = self._url('/v2/images')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
first = jsonutils.loads(response.text)['first']
self.assertEqual(0, len(images))
self.assertNotIn('next', jsonutils.loads(response.text))
self.assertEqual('/v2/images', first)
# Create 7 images
images = []
fixtures = [
{'name': 'image-3', 'type': 'kernel', 'ping': 'pong'},
{'name': 'image-4', 'type': 'kernel', 'ping': 'pong'},
{'name': 'image-1', 'type': 'kernel', 'ping': 'pong'},
{'name': 'image-3', 'type': 'ramdisk', 'ping': 'pong'},
{'name': 'image-2', 'type': 'kernel', 'ping': 'ding'},
{'name': 'image-3', 'type': 'kernel', 'ping': 'pong'},
{'name': 'image-2', 'type': 'kernel', 'ping': 'pong'},
]
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json'})
for fixture in fixtures:
data = jsonutils.dumps(fixture)
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
images.append(jsonutils.loads(response.text))
# Image list should contain 7 images
path = self._url('/v2/images')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
body = jsonutils.loads(response.text)
self.assertEqual(7, len(body['images']))
self.assertEqual('/v2/images', body['first'])
self.assertNotIn('next', jsonutils.loads(response.text))
# Begin pagination after the first image
template_url = ('/v2/images?limit=2&sort_dir=asc&sort_key=name'
'&marker=%s&type=kernel&ping=pong')
path = self._url(template_url % images[2]['id'])
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
body = jsonutils.loads(response.text)
self.assertEqual(2, len(body['images']))
response_ids = [image['id'] for image in body['images']]
self.assertEqual([images[6]['id'], images[0]['id']], response_ids)
# Continue pagination using next link from previous request
path = self._url(body['next'])
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
body = jsonutils.loads(response.text)
self.assertEqual(2, len(body['images']))
response_ids = [image['id'] for image in body['images']]
self.assertEqual([images[5]['id'], images[1]['id']], response_ids)
# Continue pagination - expect no results
path = self._url(body['next'])
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
body = jsonutils.loads(response.text)
self.assertEqual(0, len(body['images']))
# Delete first image
path = self._url('/v2/images/%s' % images[0]['id'])
response = requests.delete(path, headers=self._headers())
self.assertEqual(204, response.status_code)
# Ensure bad request for using a deleted image as marker
path = self._url('/v2/images?marker=%s' % images[0]['id'])
response = requests.get(path, headers=self._headers())
self.assertEqual(400, response.status_code)
self.stop_servers()
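    # Note on the pagination above: the 'next' link is marker-based -- it
    # embeds the id of the last image on the current page, together with the
    # original filters, e.g. (illustrative only):
    #     /v2/images?limit=2&sort_dir=asc&sort_key=name&marker=<last-image-id>&type=kernel&ping=pong
    # This is why a deleted image used as a marker yields 400 Bad Request.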
def test_image_visibility_to_different_users(self):
self.cleanup()
self.api_server.deployment_flavor = 'fakeauth'
self.registry_server.deployment_flavor = 'fakeauth'
kwargs = self.__dict__.copy()
kwargs['use_user_token'] = True
self.start_servers(**kwargs)
owners = ['admin', 'tenant1', 'tenant2', 'none']
visibilities = ['public', 'private']
for owner in owners:
for visibility in visibilities:
path = self._url('/v2/images')
headers = self._headers({
'content-type': 'application/json',
'X-Auth-Token': 'createuser:%s:admin' % owner,
})
data = jsonutils.dumps({
'name': '%s-%s' % (owner, visibility),
'visibility': visibility,
})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
def list_images(tenant, role='', visibility=None):
auth_token = 'user:%s:%s' % (tenant, role)
headers = {'X-Auth-Token': auth_token}
path = self._url('/v2/images')
if visibility is not None:
path += '?visibility=%s' % visibility
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
return jsonutils.loads(response.text)['images']
# 1. Known user sees public and their own images
images = list_images('tenant1')
self.assertEqual(5, len(images))
for image in images:
self.assertTrue(image['visibility'] == 'public'
or 'tenant1' in image['name'])
# 2. Known user, visibility=public, sees all public images
images = list_images('tenant1', visibility='public')
self.assertEqual(4, len(images))
for image in images:
self.assertEqual('public', image['visibility'])
# 3. Known user, visibility=private, sees only their private image
images = list_images('tenant1', visibility='private')
self.assertEqual(1, len(images))
image = images[0]
self.assertEqual('private', image['visibility'])
self.assertIn('tenant1', image['name'])
# 4. Unknown user sees only public images
images = list_images('none')
self.assertEqual(4, len(images))
for image in images:
self.assertEqual('public', image['visibility'])
# 5. Unknown user, visibility=public, sees only public images
images = list_images('none', visibility='public')
self.assertEqual(4, len(images))
for image in images:
self.assertEqual('public', image['visibility'])
# 6. Unknown user, visibility=private, sees no images
images = list_images('none', visibility='private')
self.assertEqual(0, len(images))
# 7. Unknown admin sees all images
images = list_images('none', role='admin')
self.assertEqual(8, len(images))
        # 8. Unknown admin, visibility=public, sees only public images
images = list_images('none', role='admin', visibility='public')
self.assertEqual(4, len(images))
for image in images:
self.assertEqual('public', image['visibility'])
# 9. Unknown admin, visibility=private, sees only private images
images = list_images('none', role='admin', visibility='private')
self.assertEqual(4, len(images))
for image in images:
self.assertEqual('private', image['visibility'])
# 10. Known admin sees all images
images = list_images('admin', role='admin')
self.assertEqual(8, len(images))
# 11. Known admin, visibility=public, sees all public images
images = list_images('admin', role='admin', visibility='public')
self.assertEqual(4, len(images))
for image in images:
self.assertEqual('public', image['visibility'])
# 12. Known admin, visibility=private, sees all private images
images = list_images('admin', role='admin', visibility='private')
self.assertEqual(4, len(images))
for image in images:
self.assertEqual('private', image['visibility'])
self.stop_servers()
def test_update_locations(self):
self.start_servers(**self.__dict__.copy())
# Create an image
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json'})
data = jsonutils.dumps({'name': 'image-1', 'disk_format': 'aki',
'container_format': 'aki'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Returned image entity should have a generated id and status
image = jsonutils.loads(response.text)
image_id = image['id']
self.assertEqual('queued', image['status'])
self.assertIsNone(image['size'])
self.assertIsNone(image['virtual_size'])
# Update locations for the queued image
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type})
url = 'http://127.0.0.1:%s/foo_image' % self.http_port0
data = jsonutils.dumps([{'op': 'replace', 'path': '/locations',
'value': [{'url': url, 'metadata': {}}]
}])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code, response.text)
# The image size should be updated
path = self._url('/v2/images/%s' % image_id)
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertEqual(10, image['size'])
def test_update_locations_with_restricted_sources(self):
self.start_servers(**self.__dict__.copy())
# Create an image
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json'})
data = jsonutils.dumps({'name': 'image-1', 'disk_format': 'aki',
'container_format': 'aki'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Returned image entity should have a generated id and status
image = jsonutils.loads(response.text)
image_id = image['id']
self.assertEqual('queued', image['status'])
self.assertIsNone(image['size'])
self.assertIsNone(image['virtual_size'])
# Update locations for the queued image
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type})
data = jsonutils.dumps([{'op': 'replace', 'path': '/locations',
'value': [{'url': 'file:///foo_image',
'metadata': {}}]
}])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(400, response.status_code, response.text)
data = jsonutils.dumps([{'op': 'replace', 'path': '/locations',
'value': [{'url': 'swift+config:///foo_image',
'metadata': {}}]
}])
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(400, response.status_code, response.text)
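        # Note: 'file://' and 'swift+config://' location URLs are rejected
        # with 400 above, presumably because such schemes could expose or
        # overwrite data on the API node itself; only schemes the deployment
        # does not restrict may be set as image locations.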
class TestImagesWithRegistry(TestImages):
def setUp(self):
super(TestImagesWithRegistry, self).setUp()
self.api_server.data_api = (
'glance.tests.functional.v2.registry_data_api')
self.registry_server.deployment_flavor = 'trusted-auth'
class TestImageDirectURLVisibility(functional.FunctionalTest):
def setUp(self):
super(TestImageDirectURLVisibility, self).setUp()
self.cleanup()
self.api_server.deployment_flavor = 'noauth'
def _url(self, path):
return 'http://127.0.0.1:%d%s' % (self.api_port, path)
def _headers(self, custom_headers=None):
base_headers = {
'X-Identity-Status': 'Confirmed',
'X-Auth-Token': '932c5c84-02ac-4fe5-a9ba-620af0e2bb96',
'X-User-Id': 'f9a41d13-0c13-47e9-bee2-ce4e8bfe958e',
'X-Tenant-Id': TENANT1,
'X-Roles': 'member',
}
base_headers.update(custom_headers or {})
return base_headers
def test_v2_not_enabled(self):
self.api_server.enable_v2_api = False
self.start_servers(**self.__dict__.copy())
path = self._url('/v2/images')
response = requests.get(path, headers=self._headers())
self.assertEqual(300, response.status_code)
self.stop_servers()
def test_v2_enabled(self):
self.api_server.enable_v2_api = True
self.start_servers(**self.__dict__.copy())
path = self._url('/v2/images')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
self.stop_servers()
def test_image_direct_url_visible(self):
self.api_server.show_image_direct_url = True
self.start_servers(**self.__dict__.copy())
# Image list should be empty
path = self._url('/v2/images')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(0, len(images))
# Create an image
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json'})
data = jsonutils.dumps({'name': 'image-1', 'type': 'kernel',
'foo': 'bar', 'disk_format': 'aki',
'container_format': 'aki',
'visibility': 'public'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Get the image id
image = jsonutils.loads(response.text)
image_id = image['id']
# Image direct_url should not be visible before location is set
path = self._url('/v2/images/%s' % image_id)
headers = self._headers({'Content-Type': 'application/json'})
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertNotIn('direct_url', image)
# Upload some image data, setting the image location
path = self._url('/v2/images/%s/file' % image_id)
headers = self._headers({'Content-Type': 'application/octet-stream'})
response = requests.put(path, headers=headers, data='ZZZZZ')
self.assertEqual(204, response.status_code)
# Image direct_url should be visible
path = self._url('/v2/images/%s' % image_id)
headers = self._headers({'Content-Type': 'application/json'})
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertIn('direct_url', image)
# Image direct_url should be visible to non-owner, non-admin user
path = self._url('/v2/images/%s' % image_id)
headers = self._headers({'Content-Type': 'application/json',
'X-Tenant-Id': TENANT2})
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertIn('direct_url', image)
# Image direct_url should be visible in a list
path = self._url('/v2/images')
headers = self._headers({'Content-Type': 'application/json'})
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)['images'][0]
self.assertIn('direct_url', image)
self.stop_servers()
def test_image_multiple_location_url_visible(self):
self.api_server.show_multiple_locations = True
self.start_servers(**self.__dict__.copy())
# Create an image
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json'})
data = jsonutils.dumps({'name': 'image-1', 'type': 'kernel',
'foo': 'bar', 'disk_format': 'aki',
'container_format': 'aki'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Get the image id
image = jsonutils.loads(response.text)
image_id = image['id']
# Image locations should not be visible before location is set
path = self._url('/v2/images/%s' % image_id)
headers = self._headers({'Content-Type': 'application/json'})
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertIn('locations', image)
self.assertTrue(image["locations"] == [])
# Upload some image data, setting the image location
path = self._url('/v2/images/%s/file' % image_id)
headers = self._headers({'Content-Type': 'application/octet-stream'})
response = requests.put(path, headers=headers, data='ZZZZZ')
self.assertEqual(204, response.status_code)
# Image locations should be visible
path = self._url('/v2/images/%s' % image_id)
headers = self._headers({'Content-Type': 'application/json'})
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertIn('locations', image)
loc = image['locations']
self.assertTrue(len(loc) > 0)
loc = loc[0]
self.assertIn('url', loc)
self.assertIn('metadata', loc)
self.stop_servers()
def test_image_direct_url_not_visible(self):
self.api_server.show_image_direct_url = False
self.start_servers(**self.__dict__.copy())
# Image list should be empty
path = self._url('/v2/images')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(0, len(images))
# Create an image
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json'})
data = jsonutils.dumps({'name': 'image-1', 'type': 'kernel',
'foo': 'bar', 'disk_format': 'aki',
'container_format': 'aki'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Get the image id
image = jsonutils.loads(response.text)
image_id = image['id']
# Upload some image data, setting the image location
path = self._url('/v2/images/%s/file' % image_id)
headers = self._headers({'Content-Type': 'application/octet-stream'})
response = requests.put(path, headers=headers, data='ZZZZZ')
self.assertEqual(204, response.status_code)
# Image direct_url should not be visible
path = self._url('/v2/images/%s' % image_id)
headers = self._headers({'Content-Type': 'application/json'})
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertNotIn('direct_url', image)
# Image direct_url should not be visible in a list
path = self._url('/v2/images')
headers = self._headers({'Content-Type': 'application/json'})
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)['images'][0]
self.assertNotIn('direct_url', image)
self.stop_servers()
class TestImageDirectURLVisibilityWithRegistry(TestImageDirectURLVisibility):
def setUp(self):
super(TestImageDirectURLVisibilityWithRegistry, self).setUp()
self.api_server.data_api = (
'glance.tests.functional.v2.registry_data_api')
self.registry_server.deployment_flavor = 'trusted-auth'
class TestImageLocationSelectionStrategy(functional.FunctionalTest):
def setUp(self):
super(TestImageLocationSelectionStrategy, self).setUp()
self.cleanup()
self.api_server.deployment_flavor = 'noauth'
for i in range(3):
ret = test_utils.start_http_server("foo_image_id%d" % i,
"foo_image%d" % i)
setattr(self, 'http_server%d_pid' % i, ret[0])
setattr(self, 'http_port%d' % i, ret[1])
def tearDown(self):
for i in range(3):
pid = getattr(self, 'http_server%d_pid' % i, None)
if pid:
os.kill(pid, signal.SIGKILL)
super(TestImageLocationSelectionStrategy, self).tearDown()
def _url(self, path):
return 'http://127.0.0.1:%d%s' % (self.api_port, path)
def _headers(self, custom_headers=None):
base_headers = {
'X-Identity-Status': 'Confirmed',
'X-Auth-Token': '932c5c84-02ac-4fe5-a9ba-620af0e2bb96',
'X-User-Id': 'f9a41d13-0c13-47e9-bee2-ce4e8bfe958e',
'X-Tenant-Id': TENANT1,
'X-Roles': 'member',
}
base_headers.update(custom_headers or {})
return base_headers
def test_image_locations_with_order_strategy(self):
self.api_server.show_image_direct_url = True
self.api_server.show_multiple_locations = True
self.image_location_quota = 10
self.api_server.location_strategy = 'location_order'
preference = "http, swift, filesystem"
self.api_server.store_type_location_strategy_preference = preference
self.start_servers(**self.__dict__.copy())
# Create an image
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json'})
data = jsonutils.dumps({'name': 'image-1', 'type': 'kernel',
'foo': 'bar', 'disk_format': 'aki',
'container_format': 'aki'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Get the image id
image = jsonutils.loads(response.text)
image_id = image['id']
# Image locations should not be visible before location is set
path = self._url('/v2/images/%s' % image_id)
headers = self._headers({'Content-Type': 'application/json'})
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertIn('locations', image)
self.assertTrue(image["locations"] == [])
# Update image locations via PATCH
path = self._url('/v2/images/%s' % image_id)
media_type = 'application/openstack-images-v2.1-json-patch'
headers = self._headers({'content-type': media_type})
values = [{'url': 'http://127.0.0.1:%s/foo_image' % self.http_port0,
'metadata': {}},
{'url': 'http://127.0.0.1:%s/foo_image' % self.http_port1,
'metadata': {}}]
doc = [{'op': 'replace',
'path': '/locations',
'value': values}]
data = jsonutils.dumps(doc)
response = requests.patch(path, headers=headers, data=data)
self.assertEqual(200, response.status_code)
# Image locations should be visible
path = self._url('/v2/images/%s' % image_id)
headers = self._headers({'Content-Type': 'application/json'})
response = requests.get(path, headers=headers)
self.assertEqual(200, response.status_code)
image = jsonutils.loads(response.text)
self.assertIn('locations', image)
self.assertEqual(values, image['locations'])
self.assertIn('direct_url', image)
self.assertEqual(values[0]['url'], image['direct_url'])
self.stop_servers()
class TestImageLocationSelectionStrategyWithRegistry(
TestImageLocationSelectionStrategy):
def setUp(self):
super(TestImageLocationSelectionStrategyWithRegistry, self).setUp()
self.api_server.data_api = (
'glance.tests.functional.v2.registry_data_api')
self.registry_server.deployment_flavor = 'trusted-auth'
class TestImageMembers(functional.FunctionalTest):
def setUp(self):
super(TestImageMembers, self).setUp()
self.cleanup()
self.api_server.deployment_flavor = 'fakeauth'
self.registry_server.deployment_flavor = 'fakeauth'
self.start_servers(**self.__dict__.copy())
def _url(self, path):
return 'http://127.0.0.1:%d%s' % (self.api_port, path)
def _headers(self, custom_headers=None):
base_headers = {
'X-Identity-Status': 'Confirmed',
'X-Auth-Token': '932c5c84-02ac-4fe5-a9ba-620af0e2bb96',
'X-User-Id': 'f9a41d13-0c13-47e9-bee2-ce4e8bfe958e',
'X-Tenant-Id': TENANT1,
'X-Roles': 'member',
}
base_headers.update(custom_headers or {})
return base_headers
def test_image_member_lifecycle(self):
def get_header(tenant, role=''):
auth_token = 'user:%s:%s' % (tenant, role)
headers = {'X-Auth-Token': auth_token}
return headers
# Image list should be empty
path = self._url('/v2/images')
response = requests.get(path, headers=get_header('tenant1'))
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(0, len(images))
owners = ['tenant1', 'tenant2', 'admin']
visibilities = ['public', 'private']
image_fixture = []
for owner in owners:
for visibility in visibilities:
path = self._url('/v2/images')
headers = self._headers({
'content-type': 'application/json',
'X-Auth-Token': 'createuser:%s:admin' % owner,
})
data = jsonutils.dumps({
'name': '%s-%s' % (owner, visibility),
'visibility': visibility,
})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image_fixture.append(jsonutils.loads(response.text))
# Image list should contain 4 images for tenant1
path = self._url('/v2/images')
response = requests.get(path, headers=get_header('tenant1'))
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(4, len(images))
# Image list should contain 3 images for TENANT3
path = self._url('/v2/images')
response = requests.get(path, headers=get_header(TENANT3))
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(3, len(images))
# Add Image member for tenant1-private image
path = self._url('/v2/images/%s/members' % image_fixture[1]['id'])
body = jsonutils.dumps({'member': TENANT3})
response = requests.post(path, headers=get_header('tenant1'),
data=body)
self.assertEqual(200, response.status_code)
image_member = jsonutils.loads(response.text)
self.assertEqual(image_fixture[1]['id'], image_member['image_id'])
self.assertEqual(TENANT3, image_member['member_id'])
self.assertIn('created_at', image_member)
self.assertIn('updated_at', image_member)
self.assertEqual('pending', image_member['status'])
# Image list should contain 3 images for TENANT3
path = self._url('/v2/images')
response = requests.get(path, headers=get_header(TENANT3))
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(3, len(images))
# Image list should contain 0 shared images for TENANT3
# because default is accepted
path = self._url('/v2/images?visibility=shared')
response = requests.get(path, headers=get_header(TENANT3))
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(0, len(images))
# Image list should contain 4 images for TENANT3 with status pending
path = self._url('/v2/images?member_status=pending')
response = requests.get(path, headers=get_header(TENANT3))
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(4, len(images))
# Image list should contain 4 images for TENANT3 with status all
path = self._url('/v2/images?member_status=all')
response = requests.get(path, headers=get_header(TENANT3))
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(4, len(images))
# Image list should contain 1 image for TENANT3 with status pending
# and visibility shared
path = self._url('/v2/images?member_status=pending&visibility=shared')
response = requests.get(path, headers=get_header(TENANT3))
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(1, len(images))
self.assertEqual(images[0]['name'], 'tenant1-private')
        # Image list should contain 0 images for TENANT3 with status rejected
# and visibility shared
path = self._url('/v2/images?member_status=rejected&visibility=shared')
response = requests.get(path, headers=get_header(TENANT3))
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(0, len(images))
        # Image list should contain 0 images for TENANT3 with status accepted
# and visibility shared
path = self._url('/v2/images?member_status=accepted&visibility=shared')
response = requests.get(path, headers=get_header(TENANT3))
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(0, len(images))
        # Image list should contain 0 images for TENANT3 with status accepted
# and visibility private
path = self._url('/v2/images?visibility=private')
response = requests.get(path, headers=get_header(TENANT3))
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(0, len(images))
# Image tenant2-private's image members list should contain no members
path = self._url('/v2/images/%s/members' % image_fixture[3]['id'])
response = requests.get(path, headers=get_header('tenant2'))
self.assertEqual(200, response.status_code)
body = jsonutils.loads(response.text)
self.assertEqual(0, len(body['members']))
        # Tenant 1, who is the owner, cannot change the status of an image member
path = self._url('/v2/images/%s/members/%s' % (image_fixture[1]['id'],
TENANT3))
body = jsonutils.dumps({'status': 'accepted'})
response = requests.put(path, headers=get_header('tenant1'), data=body)
self.assertEqual(403, response.status_code)
        # Tenant 1, who is the owner, can get the status of its own image member
path = self._url('/v2/images/%s/members/%s' % (image_fixture[1]['id'],
TENANT3))
response = requests.get(path, headers=get_header('tenant1'))
self.assertEqual(200, response.status_code)
body = jsonutils.loads(response.text)
self.assertEqual('pending', body['status'])
self.assertEqual(image_fixture[1]['id'], body['image_id'])
self.assertEqual(TENANT3, body['member_id'])
        # Tenant 3, who is the member, can get its own membership status
path = self._url('/v2/images/%s/members/%s' % (image_fixture[1]['id'],
TENANT3))
response = requests.get(path, headers=get_header(TENANT3))
self.assertEqual(200, response.status_code)
body = jsonutils.loads(response.text)
self.assertEqual('pending', body['status'])
self.assertEqual(image_fixture[1]['id'], body['image_id'])
self.assertEqual(TENANT3, body['member_id'])
        # Tenant 2, who is not the owner, cannot get the status of an image member
path = self._url('/v2/images/%s/members/%s' % (image_fixture[1]['id'],
TENANT3))
response = requests.get(path, headers=get_header('tenant2'))
self.assertEqual(404, response.status_code)
# Tenant 3 can change status of image member
path = self._url('/v2/images/%s/members/%s' % (image_fixture[1]['id'],
TENANT3))
body = jsonutils.dumps({'status': 'accepted'})
response = requests.put(path, headers=get_header(TENANT3), data=body)
self.assertEqual(200, response.status_code)
image_member = jsonutils.loads(response.text)
self.assertEqual(image_fixture[1]['id'], image_member['image_id'])
self.assertEqual(TENANT3, image_member['member_id'])
self.assertEqual('accepted', image_member['status'])
# Image list should contain 4 images for TENANT3 because status is
# accepted
path = self._url('/v2/images')
response = requests.get(path, headers=get_header(TENANT3))
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(4, len(images))
# Tenant 3 invalid status change
path = self._url('/v2/images/%s/members/%s' % (image_fixture[1]['id'],
TENANT3))
body = jsonutils.dumps({'status': 'invalid-status'})
response = requests.put(path, headers=get_header(TENANT3), data=body)
self.assertEqual(400, response.status_code)
        # Owner cannot change status of image member
path = self._url('/v2/images/%s/members/%s' % (image_fixture[1]['id'],
TENANT3))
body = jsonutils.dumps({'status': 'accepted'})
response = requests.put(path, headers=get_header('tenant1'), data=body)
self.assertEqual(403, response.status_code)
# Add Image member for tenant2-private image
path = self._url('/v2/images/%s/members' % image_fixture[3]['id'])
body = jsonutils.dumps({'member': TENANT4})
response = requests.post(path, headers=get_header('tenant2'),
data=body)
self.assertEqual(200, response.status_code)
image_member = jsonutils.loads(response.text)
self.assertEqual(image_fixture[3]['id'], image_member['image_id'])
self.assertEqual(TENANT4, image_member['member_id'])
self.assertIn('created_at', image_member)
self.assertIn('updated_at', image_member)
# Add Image member to public image
path = self._url('/v2/images/%s/members' % image_fixture[0]['id'])
body = jsonutils.dumps({'member': TENANT2})
response = requests.post(path, headers=get_header('tenant1'),
data=body)
self.assertEqual(403, response.status_code)
# Image tenant1-private's members list should contain 1 member
path = self._url('/v2/images/%s/members' % image_fixture[1]['id'])
response = requests.get(path, headers=get_header('tenant1'))
self.assertEqual(200, response.status_code)
body = jsonutils.loads(response.text)
self.assertEqual(1, len(body['members']))
# Admin can see any members
path = self._url('/v2/images/%s/members' % image_fixture[1]['id'])
response = requests.get(path, headers=get_header('tenant1', 'admin'))
self.assertEqual(200, response.status_code)
body = jsonutils.loads(response.text)
self.assertEqual(1, len(body['members']))
# Image members not found for private image not owned by TENANT 1
path = self._url('/v2/images/%s/members' % image_fixture[3]['id'])
response = requests.get(path, headers=get_header('tenant1'))
self.assertEqual(404, response.status_code)
# Image members forbidden for public image
path = self._url('/v2/images/%s/members' % image_fixture[0]['id'])
response = requests.get(path, headers=get_header('tenant1'))
self.assertIn("Public images do not have members", response.text)
self.assertEqual(403, response.status_code)
        # Image member cannot delete its own image membership
path = self._url('/v2/images/%s/members/%s' % (image_fixture[1]['id'],
TENANT3))
response = requests.delete(path, headers=get_header(TENANT3))
self.assertEqual(403, response.status_code)
# Delete Image member
path = self._url('/v2/images/%s/members/%s' % (image_fixture[1]['id'],
TENANT3))
response = requests.delete(path, headers=get_header('tenant1'))
self.assertEqual(204, response.status_code)
# Now the image has no members
path = self._url('/v2/images/%s/members' % image_fixture[1]['id'])
response = requests.get(path, headers=get_header('tenant1'))
self.assertEqual(200, response.status_code)
body = jsonutils.loads(response.text)
self.assertEqual(0, len(body['members']))
# Adding 11 image members should fail since configured limit is 10
path = self._url('/v2/images/%s/members' % image_fixture[1]['id'])
for i in range(10):
body = jsonutils.dumps({'member': str(uuid.uuid4())})
response = requests.post(path, headers=get_header('tenant1'),
data=body)
self.assertEqual(200, response.status_code)
body = jsonutils.dumps({'member': str(uuid.uuid4())})
response = requests.post(path, headers=get_header('tenant1'),
data=body)
self.assertEqual(413, response.status_code)
# Get Image member should return not found for public image
path = self._url('/v2/images/%s/members/%s' % (image_fixture[0]['id'],
TENANT3))
response = requests.get(path, headers=get_header('tenant1'))
self.assertEqual(404, response.status_code)
# Delete Image member should return forbidden for public image
path = self._url('/v2/images/%s/members/%s' % (image_fixture[0]['id'],
TENANT3))
response = requests.delete(path, headers=get_header('tenant1'))
self.assertEqual(403, response.status_code)
self.stop_servers()
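    # Summary of the membership rules exercised above: the image owner creates
    # and deletes memberships, only the member itself may move its status
    # among 'pending', 'accepted' and 'rejected', and a shared image appears
    # in the member's default image list only once the membership is
    # 'accepted'.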
class TestImageMembersWithRegistry(TestImageMembers):
def setUp(self):
super(TestImageMembersWithRegistry, self).setUp()
self.api_server.data_api = (
'glance.tests.functional.v2.registry_data_api')
self.registry_server.deployment_flavor = 'trusted-auth'
class TestQuotas(functional.FunctionalTest):
def setUp(self):
super(TestQuotas, self).setUp()
self.cleanup()
self.api_server.deployment_flavor = 'noauth'
self.registry_server.deployment_flavor = 'trusted-auth'
self.user_storage_quota = 100
self.start_servers(**self.__dict__.copy())
def _url(self, path):
return 'http://127.0.0.1:%d%s' % (self.api_port, path)
def _headers(self, custom_headers=None):
base_headers = {
'X-Identity-Status': 'Confirmed',
'X-Auth-Token': '932c5c84-02ac-4fe5-a9ba-620af0e2bb96',
'X-User-Id': 'f9a41d13-0c13-47e9-bee2-ce4e8bfe958e',
'X-Tenant-Id': TENANT1,
'X-Roles': 'member',
}
base_headers.update(custom_headers or {})
return base_headers
def _upload_image_test(self, data_src, expected_status):
# Image list should be empty
path = self._url('/v2/images')
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
images = jsonutils.loads(response.text)['images']
self.assertEqual(0, len(images))
# Create an image (with a deployer-defined property)
path = self._url('/v2/images')
headers = self._headers({'content-type': 'application/json'})
data = jsonutils.dumps({'name': 'testimg',
'type': 'kernel',
'foo': 'bar',
'disk_format': 'aki',
'container_format': 'aki'})
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
image = jsonutils.loads(response.text)
image_id = image['id']
# upload data
path = self._url('/v2/images/%s/file' % image_id)
headers = self._headers({'Content-Type': 'application/octet-stream'})
response = requests.put(path, headers=headers, data=data_src)
self.assertEqual(expected_status, response.status_code)
# Deletion should work
path = self._url('/v2/images/%s' % image_id)
response = requests.delete(path, headers=self._headers())
self.assertEqual(204, response.status_code)
def test_image_upload_under_quota(self):
data = 'x' * (self.user_storage_quota - 1)
self._upload_image_test(data, 204)
def test_image_upload_exceed_quota(self):
data = 'x' * (self.user_storage_quota + 1)
self._upload_image_test(data, 413)
def test_chunked_image_upload_under_quota(self):
def data_gen():
yield 'x' * (self.user_storage_quota - 1)
self._upload_image_test(data_gen(), 204)
def test_chunked_image_upload_exceed_quota(self):
def data_gen():
yield 'x' * (self.user_storage_quota + 1)
self._upload_image_test(data_gen(), 413)
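    # Note on the chunked variants above: passing a generator as the request
    # body makes requests send it with 'Transfer-Encoding: chunked' and no
    # Content-Length header, so the server cannot reject the upload up front
    # and must enforce the storage quota while streaming the body.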
class TestQuotasWithRegistry(TestQuotas):
def setUp(self):
super(TestQuotasWithRegistry, self).setUp()
self.api_server.data_api = (
'glance.tests.functional.v2.registry_data_api')
self.registry_server.deployment_flavor = 'trusted-auth'
avg_line_length: 43.876729 | max_line_length: 79 | alphanum_fraction: 0.58756

hexsha: 754cf7b85e0defb27ca35cc6fc7a98a014e9aba2 | size: 2736 | ext: py | lang: Python
max_stars_repo_path: models/old_models/convtwonet.py | max_stars_repo_name: WatChMaL/CNN
max_stars_repo_head_hexsha: 2e14397bca6ced2fdfeab406e3c28561bb3af384
max_stars_repo_licenses: ["CNRI-Python", "RSA-MD"] | max_stars_count: 3
max_stars_repo_stars_event: 2019-05-10T01:38:07.000Z to 2021-09-06T16:30:18.000Z
max_issues_repo_path: models/old_models/convtwonet.py | max_issues_repo_name: WatChMaL/VAE
max_issues_repo_head_hexsha: 2e14397bca6ced2fdfeab406e3c28561bb3af384
max_issues_repo_licenses: ["CNRI-Python", "RSA-MD"] | max_issues_count: 3
max_issues_repo_issues_event: 2019-05-11T02:44:53.000Z to 2019-05-24T18:37:58.000Z
max_forks_repo_path: models/old_models/convtwonet.py | max_forks_repo_name: WatChMaL/CNN
max_forks_repo_head_hexsha: 2e14397bca6ced2fdfeab406e3c28561bb3af384
max_forks_repo_licenses: ["CNRI-Python", "RSA-MD"] | max_forks_count: 8
max_forks_repo_forks_event: 2019-05-06T22:39:39.000Z to 2020-11-29T17:15:50.000Z
content:
"""
convtwonet.py
PyTorch implementation of the ConvtwoNet as a classifier for IWCD detector response
The model uses only convolutional layers in the architecture removing all max pooling layers from the KazuNet.
Author : Abhishek .
"""
# PyTorch imports
import torch.nn as nn
# ConvNet class
class ConvtwoNet(nn.Module):
# Initializer
def __init__(self, num_input_channels=19, num_classes=3, train=True):
# Initialize the superclass
super(ConvtwoNet, self).__init__()
# Activation functions
self.relu = nn.ReLU()
self.softmax = nn.Softmax(dim=1)
# ------------------------------------------------------------------------
# Encoder
# ------------------------------------------------------------------------
# Feature extraction convolutions
self.en_conv1 = nn.Conv2d(num_input_channels, 64, kernel_size=3, stride=1)
self.en_conv2 = nn.Conv2d(64, 64, kernel_size=3, stride=1)
self.en_conv3 = nn.Conv2d(64, 64, kernel_size=3, stride=1)
self.en_conv4 = nn.Conv2d(64, 64, kernel_size=3, stride=1)
# Downsampling convolution
self.en_maxconv1 = nn.Conv2d(64, 64, kernel_size=2, stride=2)
# Feature extraction convolutions
self.en_conv5 = nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1)
self.en_conv6 = nn.Conv2d(64, 32, kernel_size=3, stride=1, padding=1)
self.en_conv7 = nn.Conv2d(32, 16, kernel_size=3, stride=1, padding=1)
# Fully connected layers
self.en_fc1 = nn.Linear(1024, 512)
self.en_fc2 = nn.Linear(512, 256)
self.en_fc3 = nn.Linear(256, 128)
# Classifier output layer
self.en_fc4 = nn.Linear(128, num_classes)
# Forward
def forward(self, X):
return self.classify(X)
# Classifier
def classify(self, X):
# Feature extraction convolutions
x = self.relu(self.en_conv1(X))
x = self.relu(self.en_conv2(x))
x = self.relu(self.en_conv3(x))
x = self.relu(self.en_conv4(x))
# Downsampling convolution
x = self.relu(self.en_maxconv1(x))
# Feature extraction convolutions
x = self.relu(self.en_conv5(x))
x = self.relu(self.en_conv6(x))
x = self.relu(self.en_conv7(x))
# Flattening
x = x.view(-1, 1024)
# Fully connected layers
x = self.relu(self.en_fc1(x))
x = self.relu(self.en_fc2(x))
x = self.relu(self.en_fc3(x))
# Classifier output layer
x = self.en_fc4(x)
return x
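# Minimal smoke test (a sketch, not part of the original module): the flatten
# in classify() assumes the post-conv feature map holds exactly 1024 values,
# which is satisfied by a 19-channel 16x40 event grid (16 channels x 4 x 16
# after the conv stack). The input shape below is an assumption chosen for
# illustration only.
if __name__ == "__main__":
    import torch

    model = ConvtwoNet(num_input_channels=19, num_classes=3)
    dummy_events = torch.randn(2, 19, 16, 40)  # (batch, channels, height, width)
    logits = model(dummy_events)
    print(logits.shape)  # expected: torch.Size([2, 3])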
avg_line_length: 31.090909 | max_line_length: 110 | alphanum_fraction: 0.555556

hexsha: 0e5247129ac0a04f4127ac571540eff71fda8b63 | size: 2834 | ext: py | lang: Python
max_stars_repo_path: avionix_airflow/kubernetes/airflow/airflow_worker_pod_template.py | max_stars_repo_name: zbrookle/avionix_airflow
max_stars_repo_head_hexsha: a9b4665ce7699bcee7252a3f10d588a57c1f32c4
max_stars_repo_licenses: ["BSD-3-Clause"] | max_stars_count: 5
max_stars_repo_stars_event: 2020-08-31T07:33:47.000Z to 2022-01-19T09:03:09.000Z
max_issues_repo_path: avionix_airflow/kubernetes/airflow/airflow_worker_pod_template.py | max_issues_repo_name: zbrookle/avionix_airflow
max_issues_repo_head_hexsha: a9b4665ce7699bcee7252a3f10d588a57c1f32c4
max_issues_repo_licenses: ["BSD-3-Clause"] | max_issues_count: 20
max_issues_repo_issues_event: 2020-07-28T23:39:22.000Z to 2020-10-06T20:21:32.000Z
max_forks_repo_path: avionix_airflow/kubernetes/airflow/airflow_worker_pod_template.py | max_forks_repo_name: zbrookle/avionix_airflow
max_forks_repo_head_hexsha: a9b4665ce7699bcee7252a3f10d588a57c1f32c4
max_forks_repo_licenses: ["BSD-3-Clause"] | max_forks_count: 1
max_forks_repo_forks_event: 2021-09-27T14:48:41.000Z to 2021-09-27T14:48:41.000Z
content:
from typing import List
from avionix.kube.core import ConfigMap, Container
from yaml import dump
from avionix_airflow.kubernetes.airflow.airflow_containers import AirflowWorker
from avionix_airflow.kubernetes.airflow.airflow_options import AirflowOptions
from avionix_airflow.kubernetes.airflow.airflow_pods import AirflowPodTemplate
from avionix_airflow.kubernetes.airflow.airflow_storage import StorageGroupFactory
from avionix_airflow.kubernetes.cloud.cloud_options import CloudOptions
from avionix_airflow.kubernetes.monitoring.monitoring_options import MonitoringOptions
from avionix_airflow.kubernetes.namespace_meta import AirflowMeta
from avionix_airflow.kubernetes.postgres.sql_options import SqlOptions
from avionix_airflow.kubernetes.redis.redis_options import RedisOptions
from avionix_airflow.kubernetes.value_handler import ValueOrchestrator
class AirflowWorkerPodTemplate(AirflowPodTemplate):
def __init__(
self,
sql_options: SqlOptions,
redis_options: RedisOptions,
airflow_options: AirflowOptions,
monitoring_options: MonitoringOptions,
cloud_options: CloudOptions,
name: str,
service_account: str = "default",
):
values = ValueOrchestrator()
super().__init__(
sql_options,
redis_options,
airflow_options,
monitoring_options,
cloud_options,
name,
values.worker_node_labels,
StorageGroupFactory(
airflow_options, cloud_options, airflow_options.pods_namespace
),
service_account,
"Never",
)
def _get_containers(self) -> List[Container]:
return [
AirflowWorker(
"base",
self._sql_options,
self._redis_options,
self._airflow_options,
self._monitoring_options,
self._cloud_options,
)
]
class PodTemplateWorkerConfig(ConfigMap):
def __init__(
self,
sql_options: SqlOptions,
redis_options: RedisOptions,
airflow_options: AirflowOptions,
monitoring_options: MonitoringOptions,
cloud_options: CloudOptions,
):
config_file = ValueOrchestrator().airflow_worker_pod_template_config_file
super().__init__(
AirflowMeta(config_file),
data={
config_file: dump(
AirflowWorkerPodTemplate(
sql_options,
redis_options,
airflow_options,
monitoring_options,
cloud_options,
"worker-pod-template",
).to_dict(),
)
},
)
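# Design note (inferred from the code above, not from upstream docs): the
# worker pod template is rendered to YAML and shipped inside a ConfigMap
# keyed by the template file name, so the scheduler can mount it and point
# Airflow's pod_template_file setting at the mounted path.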
avg_line_length: 34.144578 | max_line_length: 86 | alphanum_fraction: 0.638673

hexsha: f9a1cce4a81728731652f924807b9a2543bbd329 | size: 44313 | ext: py | lang: Python
max_stars_repo_path: tests/test_modeling_prophetnet.py | max_stars_repo_name: gp201/transformers
max_stars_repo_head_hexsha: 89f2781e87e92b04303f7f128107718e44e755ed
max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: 2
max_stars_repo_stars_event: 2020-11-03T22:52:22.000Z to 2021-11-09T10:29:16.000Z
max_issues_repo_path: tests/test_modeling_prophetnet.py | max_issues_repo_name: gp201/transformers
max_issues_repo_head_hexsha: 89f2781e87e92b04303f7f128107718e44e755ed
max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: null
max_issues_repo_issues_event: null to null
max_forks_repo_path: tests/test_modeling_prophetnet.py | max_forks_repo_name: gp201/transformers
max_forks_repo_head_hexsha: 89f2781e87e92b04303f7f128107718e44e755ed
max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: 2
max_forks_repo_forks_event: 2021-05-25T19:59:13.000Z to 2022-02-28T18:11:12.000Z
content:
# coding=utf-8
# Copyright 2020 The HuggingFace Inc. team, The Microsoft Research team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import tempfile
import unittest
from transformers import is_torch_available
from transformers.testing_utils import require_torch, slow, torch_device
from .test_configuration_common import ConfigTester
from .test_generation_utils import GenerationTesterMixin
from .test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor
if is_torch_available():
import torch
from transformers import (
ProphetNetConfig,
ProphetNetDecoder,
ProphetNetEncoder,
ProphetNetForCausalLM,
ProphetNetForConditionalGeneration,
ProphetNetModel,
ProphetNetTokenizer,
)
class ProphetNetModelTester:
def __init__(
self,
parent,
vocab_size=99,
batch_size=13,
hidden_size=16,
encoder_seq_length=7,
decoder_seq_length=9,
# For common tests
is_training=True,
use_attention_mask=True,
use_labels=True,
decoder_start_token_id=0,
encoder_ffn_dim=32,
num_encoder_layers=4,
num_encoder_attention_heads=4,
decoder_ffn_dim=32,
num_decoder_layers=4,
num_decoder_attention_heads=4,
max_position_embeddings=30,
is_encoder_decoder=True,
pad_token_id=0,
bos_token_id=1,
eos_token_id=2,
ngram=2,
num_buckets=32,
relative_max_distance=128,
disable_ngram_loss=False,
scope=None,
):
self.parent = parent
self.batch_size = batch_size
self.encoder_seq_length = encoder_seq_length
self.decoder_seq_length = decoder_seq_length
# For common tests
self.seq_length = self.decoder_seq_length
self.is_training = is_training
self.use_attention_mask = use_attention_mask
self.use_labels = use_labels
self.vocab_size = vocab_size
self.hidden_size = hidden_size
self.num_hidden_layers = num_decoder_layers
self.num_encoder_layers = num_encoder_layers
self.num_decoder_layers = num_decoder_layers
self.decoder_ffn_dim = decoder_ffn_dim
self.encoder_ffn_dim = encoder_ffn_dim
self.num_attention_heads = num_decoder_attention_heads
self.num_encoder_attention_heads = num_encoder_attention_heads
self.num_decoder_attention_heads = num_decoder_attention_heads
self.eos_token_id = eos_token_id
self.bos_token_id = bos_token_id
self.pad_token_id = pad_token_id
self.decoder_start_token_id = decoder_start_token_id
self.ngram = ngram
self.num_buckets = num_buckets
self.relative_max_distance = relative_max_distance
self.disable_ngram_loss = disable_ngram_loss
self.max_position_embeddings = max_position_embeddings
self.is_encoder_decoder = is_encoder_decoder
self.scope = None
self.decoder_key_length = decoder_seq_length
self.base_model_out_len = 7
self.num_hidden_states_types = 3 # encoder, decoder_main, decoder_ngram
self.decoder_attention_idx = 2
def prepare_config_and_inputs(self):
input_ids = ids_tensor([self.batch_size, self.encoder_seq_length], self.vocab_size)
decoder_input_ids = ids_tensor([self.batch_size, self.decoder_seq_length], self.vocab_size)
attention_mask = None
decoder_attention_mask = None
if self.use_attention_mask:
attention_mask = ids_tensor([self.batch_size, self.encoder_seq_length], vocab_size=2)
decoder_attention_mask = ids_tensor([self.batch_size, self.decoder_seq_length], vocab_size=2)
lm_labels = None
if self.use_labels:
lm_labels = ids_tensor([self.batch_size, self.decoder_seq_length], self.vocab_size)
config = ProphetNetConfig(
vocab_size=self.vocab_size,
hidden_size=self.hidden_size,
num_encoder_layers=self.num_encoder_layers,
num_decoder_layers=self.num_decoder_layers,
decoder_ffn_dim=self.decoder_ffn_dim,
encoder_ffn_dim=self.encoder_ffn_dim,
num_encoder_attention_heads=self.num_encoder_attention_heads,
num_decoder_attention_heads=self.num_decoder_attention_heads,
eos_token_id=self.eos_token_id,
bos_token_id=self.bos_token_id,
pad_token_id=self.pad_token_id,
decoder_start_token_id=self.decoder_start_token_id,
ngram=self.ngram,
num_buckets=self.num_buckets,
relative_max_distance=self.relative_max_distance,
disable_ngram_loss=self.disable_ngram_loss,
max_position_embeddings=self.max_position_embeddings,
is_encoder_decoder=self.is_encoder_decoder,
return_dict=True,
)
return (
config,
input_ids,
decoder_input_ids,
attention_mask,
decoder_attention_mask,
lm_labels,
)
def prepare_config_and_inputs_for_decoder(self):
(
config,
input_ids,
decoder_input_ids,
attention_mask,
decoder_attention_mask,
lm_labels,
) = self.prepare_config_and_inputs()
encoder_hidden_states = floats_tensor([self.batch_size, self.encoder_seq_length, self.hidden_size])
encoder_attention_mask = ids_tensor([self.batch_size, self.encoder_seq_length], vocab_size=2)
return (
config,
decoder_input_ids,
decoder_attention_mask,
encoder_hidden_states,
encoder_attention_mask,
lm_labels,
)
def check_prepare_lm_labels_via_shift_left(
self,
config,
input_ids,
decoder_input_ids,
attention_mask,
decoder_attention_mask,
lm_labels,
):
model = ProphetNetModel(config=config)
model.to(torch_device)
model.eval()
# make sure that lm_labels are correctly padded from the right
lm_labels.masked_fill_((lm_labels == self.decoder_start_token_id), self.eos_token_id)
        # add causal pad token mask
triangular_mask = torch.tril(lm_labels.new_ones(lm_labels.shape)).logical_not()
lm_labels.masked_fill_(triangular_mask, self.pad_token_id)
decoder_input_ids = model._shift_right(lm_labels)
for i, (decoder_input_ids_slice, lm_labels_slice) in enumerate(zip(decoder_input_ids, lm_labels)):
# first item
self.parent.assertEqual(decoder_input_ids_slice[0].item(), self.decoder_start_token_id)
if i < decoder_input_ids_slice.shape[-1]:
if i < decoder_input_ids.shape[-1] - 1:
# items before diagonal
self.parent.assertListEqual(
decoder_input_ids_slice[1 : i + 1].tolist(), lm_labels_slice[:i].tolist()
)
# pad items after diagonal
if i < decoder_input_ids.shape[-1] - 2:
self.parent.assertListEqual(
decoder_input_ids_slice[i + 2 :].tolist(), lm_labels_slice[i + 1 : -1].tolist()
)
else:
# all items after square
self.parent.assertListEqual(decoder_input_ids_slice[1:].tolist(), lm_labels_slice[:-1].tolist())
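    # Worked example of the shift-right convention verified above
    # (illustrative): labels [a, b, c] become decoder inputs
    # [decoder_start_token, a, b] -- the start token is prepended and the
    # last label is dropped.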
def create_and_check_model(
self,
config,
input_ids,
decoder_input_ids,
attention_mask,
decoder_attention_mask,
lm_labels,
):
model = ProphetNetModel(config=config)
model.to(torch_device)
model.eval()
result = model(
input_ids=input_ids,
decoder_input_ids=decoder_input_ids,
attention_mask=attention_mask,
decoder_attention_mask=decoder_attention_mask,
)
result = model(input_ids=input_ids, decoder_input_ids=decoder_input_ids)
decoder_output = result.last_hidden_state
decoder_past = result.past_key_values
encoder_output = result.encoder_last_hidden_state
self.parent.assertEqual(encoder_output.size(), (self.batch_size, self.encoder_seq_length, self.hidden_size))
self.parent.assertEqual(decoder_output.size(), (self.batch_size, self.decoder_seq_length, self.hidden_size))
# There should be `num_layers` key value embeddings stored in decoder_past
self.parent.assertEqual(len(decoder_past), config.num_decoder_layers)
        # Each decoder_past tuple should hold two caches
        self.parent.assertEqual(len(decoder_past[0]), 2)  # cross-attention + uni-directional self-attention
def create_and_check_with_lm_head(
self,
config,
input_ids,
decoder_input_ids,
attention_mask,
decoder_attention_mask,
lm_labels,
):
model = ProphetNetForConditionalGeneration(config=config).to(torch_device).eval()
outputs = model(
input_ids=input_ids,
decoder_input_ids=decoder_input_ids,
decoder_attention_mask=decoder_attention_mask,
labels=lm_labels,
)
self.parent.assertEqual(len(outputs), 5)
self.parent.assertEqual(outputs["logits"].size(), (self.batch_size, self.decoder_seq_length, self.vocab_size))
self.parent.assertEqual(outputs["loss"].size(), ())
def create_and_check_causal_lm_decoder(
self,
config,
input_ids,
decoder_input_ids,
attention_mask,
decoder_attention_mask,
lm_labels,
):
model = ProphetNetForCausalLM(config=config).to(torch_device).eval()
outputs = model(
input_ids=decoder_input_ids,
attention_mask=decoder_attention_mask,
labels=lm_labels,
)
self.parent.assertEqual(len(outputs), 4)
self.parent.assertEqual(outputs["logits"].size(), (self.batch_size, self.decoder_seq_length, self.vocab_size))
self.parent.assertEqual(outputs["loss"].size(), ())
def create_and_check_generate_with_past_key_value_states(
self,
config,
input_ids,
decoder_input_ids,
attention_mask,
decoder_attention_mask,
lm_labels,
):
model = ProphetNetForConditionalGeneration(config=config).to(torch_device).eval()
torch.manual_seed(0)
output_without_past_cache = model.generate(
input_ids[:1], num_beams=2, max_length=5, do_sample=True, use_cache=False
)
torch.manual_seed(0)
output_with_past_cache = model.generate(input_ids[:1], num_beams=2, max_length=5, do_sample=True)
self.parent.assertTrue(torch.all(output_with_past_cache == output_without_past_cache))
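    # The check above holds because past_key_values caching is a pure
    # optimization: with the same seed, sampling must draw identical tokens
    # with and without the cache, so any divergence points to a caching bug
    # rather than expected randomness.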
def create_and_check_model_fp16_forward(
self,
config,
input_ids,
decoder_input_ids,
attention_mask,
decoder_attention_mask,
lm_labels,
):
model = ProphetNetModel(config=config).to(torch_device).half().eval()
output = model(input_ids, decoder_input_ids=input_ids, attention_mask=attention_mask)["last_hidden_state"]
self.parent.assertFalse(torch.isnan(output).any().item())
def create_and_check_encoder_decoder_shared_weights(
self,
config,
input_ids,
decoder_input_ids,
attention_mask,
decoder_attention_mask,
lm_labels,
):
for model_class in [ProphetNetModel, ProphetNetForConditionalGeneration]:
torch.manual_seed(0)
model = model_class(config=config).to(torch_device).eval()
# load state dict copies weights but does not tie them
if model_class == ProphetNetForConditionalGeneration:
model.prophetnet.encoder.load_state_dict(model.prophetnet.decoder.state_dict(), strict=False)
else:
model.encoder.load_state_dict(model.decoder.state_dict(), strict=False)
torch.manual_seed(0)
tied_config = copy.deepcopy(config)
tied_config.tie_encoder_decoder = True
tied_model = model_class(config=tied_config).to(torch_device).eval()
model_result = model(
input_ids=input_ids,
decoder_input_ids=decoder_input_ids,
attention_mask=attention_mask,
decoder_attention_mask=decoder_attention_mask,
return_dict=True,
)
tied_model_result = tied_model(
input_ids=input_ids,
decoder_input_ids=decoder_input_ids,
attention_mask=attention_mask,
decoder_attention_mask=decoder_attention_mask,
return_dict=True,
)
            # check that the tied model has fewer parameters
self.parent.assertLess(
sum(p.numel() for p in tied_model.parameters()), sum(p.numel() for p in model.parameters())
)
random_slice_idx = ids_tensor((1,), model_result[0].shape[-1]).item()
# check that outputs are equal
self.parent.assertTrue(
torch.allclose(
model_result[0][0, :, random_slice_idx], tied_model_result[0][0, :, random_slice_idx], atol=1e-4
)
)
# check that outputs after saving and loading are equal
with tempfile.TemporaryDirectory() as tmpdirname:
tied_model.save_pretrained(tmpdirname)
tied_model = model_class.from_pretrained(tmpdirname)
tied_model.to(torch_device)
tied_model.eval()
                # check that the tied model has fewer parameters
self.parent.assertLess(
sum(p.numel() for p in tied_model.parameters()), sum(p.numel() for p in model.parameters())
)
random_slice_idx = ids_tensor((1,), model_result[0].shape[-1]).item()
tied_model_result = tied_model(
input_ids=input_ids,
decoder_input_ids=decoder_input_ids,
attention_mask=attention_mask,
decoder_attention_mask=decoder_attention_mask,
)
# check that outputs are equal
self.parent.assertTrue(
torch.allclose(
model_result[0][0, :, random_slice_idx],
tied_model_result[0][0, :, random_slice_idx],
atol=1e-4,
)
)
def check_fast_integration(
self,
config,
*args,
):
input_ids = torch.tensor([[7, 4, 78, 0, 24, 52, 43]], device=torch_device, dtype=torch.long)
decoder_input_ids = torch.tensor([[12, 62, 25, 11, 47, 15, 14]], device=torch_device, dtype=torch.long)
attention_mask = torch.tensor([[1, 1, 1, 0, 1, 0, 0]], device=torch_device, dtype=torch.long)
decoder_attention_mask = torch.tensor([[1, 1, 1, 0, 0, 1, 0]], device=torch_device, dtype=torch.long)
lm_labels = torch.tensor([[62, 25, 11, 47, 15, 14, 24]], device=torch_device, dtype=torch.long)
torch.manual_seed(0)
config.ngram = 4
model = ProphetNetForConditionalGeneration(config=config)
model.to(torch_device)
model.eval()
with torch.no_grad():
result = model(
input_ids=input_ids,
decoder_input_ids=decoder_input_ids,
attention_mask=attention_mask,
decoder_attention_mask=decoder_attention_mask,
labels=lm_labels,
return_dict=True,
)
self.parent.assertTrue(torch.allclose(result.loss, torch.tensor(128.2925, device=torch_device), atol=1e-3))
expected_logit_slice = torch.tensor(
[-0.1565, 0.0418, 0.1207, 0.0030, 0.0665, 0.0467, 0.0412], device=torch_device
)
self.parent.assertTrue(torch.allclose(result.logits[0, :, 1], expected_logit_slice, atol=1e-3))
def check_model_with_attn_mask(self, config, input_ids, decoder_input_ids, *args):
model = ProphetNetModel(config=config)
model.to(torch_device)
model.eval()
outputs_no_mask = model(
input_ids=input_ids[:, :5], decoder_input_ids=decoder_input_ids[:, :5], return_dict=True
)
attention_mask = torch.ones_like(input_ids)
decoder_attention_mask = torch.ones_like(decoder_input_ids)
attention_mask[:, 5:] = 0
outputs_with_mask = model(
input_ids=input_ids,
attention_mask=attention_mask,
decoder_input_ids=decoder_input_ids,
decoder_attention_mask=decoder_attention_mask,
return_dict=True,
)
# check encoder
self.parent.assertTrue(
torch.allclose(
outputs_no_mask.encoder_last_hidden_state[0, :, 0],
outputs_with_mask.encoder_last_hidden_state[0, :5, 0],
atol=1e-3,
)
)
# check decoder
# main stream
self.parent.assertTrue(
torch.allclose(
outputs_no_mask.last_hidden_state[0, :, 0], outputs_with_mask.last_hidden_state[0, :5, 0], atol=1e-3
)
)
# predict stream
self.parent.assertTrue(
torch.allclose(
outputs_no_mask.last_hidden_state_ngram[0, :5, 0],
outputs_with_mask.last_hidden_state_ngram[0, :5, 0],
atol=1e-3,
)
)
def prepare_config_and_inputs_for_common(self):
config_and_inputs = self.prepare_config_and_inputs()
(
config,
input_ids,
decoder_input_ids,
attention_mask,
decoder_attention_mask,
lm_labels,
) = config_and_inputs
inputs_dict = {
"input_ids": input_ids,
"attention_mask": attention_mask,
"decoder_input_ids": decoder_input_ids,
"decoder_attention_mask": decoder_attention_mask,
"use_cache": False,
}
return config, inputs_dict
class ProphetNetStandaloneDecoderModelTester:
def __init__(
self,
parent,
vocab_size=99,
batch_size=13,
hidden_size=16,
encoder_seq_length=7,
decoder_seq_length=7,
# For common tests
is_training=True,
is_decoder=True,
use_attention_mask=True,
add_cross_attention=False,
use_cache=False,
use_labels=True,
decoder_start_token_id=0,
encoder_ffn_dim=32,
num_encoder_layers=4,
num_encoder_attention_heads=4,
decoder_ffn_dim=32,
num_decoder_layers=4,
num_decoder_attention_heads=4,
max_position_embeddings=30,
is_encoder_decoder=False,
pad_token_id=0,
bos_token_id=1,
eos_token_id=2,
ngram=2,
return_dict=True,
num_buckets=32,
relative_max_distance=128,
disable_ngram_loss=False,
scope=None,
):
self.parent = parent
self.batch_size = batch_size
self.encoder_seq_length = encoder_seq_length
self.decoder_seq_length = decoder_seq_length
# For common tests
self.seq_length = self.decoder_seq_length
self.is_training = is_training
self.use_attention_mask = use_attention_mask
self.use_labels = use_labels
self.vocab_size = vocab_size
self.hidden_size = hidden_size
self.num_hidden_layers = num_decoder_layers
self.num_encoder_layers = num_encoder_layers
self.num_decoder_layers = num_decoder_layers
self.decoder_ffn_dim = decoder_ffn_dim
self.encoder_ffn_dim = encoder_ffn_dim
self.num_attention_heads = num_decoder_attention_heads
self.num_encoder_attention_heads = num_encoder_attention_heads
self.num_decoder_attention_heads = num_decoder_attention_heads
self.eos_token_id = eos_token_id
self.bos_token_id = bos_token_id
self.pad_token_id = pad_token_id
self.decoder_start_token_id = decoder_start_token_id
self.ngram = ngram
self.num_buckets = num_buckets
self.relative_max_distance = relative_max_distance
self.use_cache = use_cache
self.disable_ngram_loss = disable_ngram_loss
self.max_position_embeddings = max_position_embeddings
self.add_cross_attention = add_cross_attention
self.is_encoder_decoder = is_encoder_decoder
self.return_dict = return_dict
self.scope = None
self.decoder_key_length = decoder_seq_length
self.base_model_out_len = 2
self.num_hidden_states_types = 2 # decoder_main, decoder_ngram
self.decoder_attention_idx = 1
def prepare_config_and_inputs(self):
input_ids = ids_tensor([self.batch_size, self.encoder_seq_length], self.vocab_size)
attention_mask = None
if self.use_attention_mask:
attention_mask = ids_tensor([self.batch_size, self.encoder_seq_length], vocab_size=2)
lm_labels = None
if self.use_labels:
lm_labels = ids_tensor([self.batch_size, self.encoder_seq_length], self.vocab_size)
config = ProphetNetConfig(
vocab_size=self.vocab_size,
hidden_size=self.hidden_size,
num_encoder_layers=self.num_encoder_layers,
num_decoder_layers=self.num_decoder_layers,
decoder_ffn_dim=self.decoder_ffn_dim,
encoder_ffn_dim=self.encoder_ffn_dim,
num_encoder_attention_heads=self.num_encoder_attention_heads,
num_decoder_attention_heads=self.num_decoder_attention_heads,
eos_token_id=self.eos_token_id,
bos_token_id=self.bos_token_id,
use_cache=self.use_cache,
pad_token_id=self.pad_token_id,
decoder_start_token_id=self.decoder_start_token_id,
ngram=self.ngram,
num_buckets=self.num_buckets,
relative_max_distance=self.relative_max_distance,
disable_ngram_loss=self.disable_ngram_loss,
max_position_embeddings=self.max_position_embeddings,
add_cross_attention=self.add_cross_attention,
is_encoder_decoder=self.is_encoder_decoder,
return_dict=self.return_dict,
)
return (
config,
input_ids,
attention_mask,
lm_labels,
)
def prepare_config_and_inputs_for_decoder(self):
(
config,
input_ids,
attention_mask,
lm_labels,
) = self.prepare_config_and_inputs()
encoder_hidden_states = floats_tensor([self.batch_size, self.encoder_seq_length, self.hidden_size])
encoder_attention_mask = ids_tensor([self.batch_size, self.encoder_seq_length], vocab_size=2)
return (
config,
input_ids,
attention_mask,
encoder_hidden_states,
encoder_attention_mask,
lm_labels,
)
def create_and_check_decoder_model_past(
self,
config,
input_ids,
attention_mask,
lm_labels,
):
config.use_cache = True
model = ProphetNetDecoder(config=config).to(torch_device).eval()
# first forward pass
outputs = model(input_ids, use_cache=True)
outputs_use_cache_conf = model(input_ids)
outputs_no_past = model(input_ids, use_cache=False)
self.parent.assertTrue(len(outputs) == len(outputs_use_cache_conf))
self.parent.assertTrue(len(outputs) == len(outputs_no_past) + 1)
past_key_values = outputs["past_key_values"]
        # create a hypothetical next token and extend next_input_ids with it
        next_tokens = ids_tensor((self.batch_size, 1), config.vocab_size)
        # append the new token to input_ids
        next_input_ids = torch.cat([input_ids, next_tokens], dim=-1)
output_from_no_past = model(next_input_ids)["last_hidden_state"]
output_from_past = model(next_tokens, past_key_values=past_key_values)["last_hidden_state"]
# select random slice
random_slice_idx = ids_tensor((1,), output_from_past.shape[-1]).item()
output_from_no_past_slice = output_from_no_past[:, next_input_ids.shape[-1] - 1, random_slice_idx].detach()
output_from_past_slice = output_from_past[:, 0, random_slice_idx].detach()
# test that outputs are equal for slice
assert torch.allclose(output_from_past_slice, output_from_no_past_slice, atol=1e-3)
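    # The equivalence above is the core contract of incremental decoding: a
    # single-token step fed the cache must reproduce the final position of a
    # full forward pass. In sketch form (hypothetical tensors):
    #   full = model(torch.cat([ids, new], dim=-1))["last_hidden_state"][:, -1]
    #   step = model(new, past_key_values=past)["last_hidden_state"][:, 0]
    #   assert torch.allclose(full, step, atol=1e-3)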
def create_and_check_decoder_model_attention_mask_past(
self,
config,
input_ids,
attention_mask,
lm_labels,
):
model = ProphetNetDecoder(config=config).to(torch_device).eval()
# create attention mask
attn_mask = torch.ones(input_ids.shape, dtype=torch.long, device=torch_device)
half_seq_length = input_ids.shape[-1] // 2
attn_mask[:, half_seq_length:] = 0
# first forward pass
past_key_values = model(input_ids, attention_mask=attn_mask, use_cache=True)["past_key_values"]
        # create a hypothetical next token and extend next_input_ids with it
next_tokens = ids_tensor((self.batch_size, 1), config.vocab_size)
# change a random masked slice from input_ids
random_seq_idx_to_change = ids_tensor((1,), half_seq_length).item() + 1
random_other_next_tokens = ids_tensor((self.batch_size, 1), config.vocab_size).squeeze(-1)
input_ids[:, -random_seq_idx_to_change] = random_other_next_tokens
# append to next input_ids and attn_mask
next_input_ids = torch.cat([input_ids, next_tokens], dim=-1)
attn_mask = torch.cat(
[attn_mask, torch.ones((attn_mask.shape[0], 1), dtype=torch.long, device=torch_device)],
dim=1,
)
# get two different outputs
output_from_no_past = model(next_input_ids)["last_hidden_state"]
output_from_past = model(next_tokens, past_key_values=past_key_values)["last_hidden_state"]
# select random slice
random_slice_idx = ids_tensor((1,), output_from_past.shape[-1]).item()
output_from_no_past_slice = output_from_no_past[:, next_input_ids.shape[-1] - 1, random_slice_idx].detach()
output_from_past_slice = output_from_past[:, 0, random_slice_idx].detach()
# test that outputs are equal for slice
assert torch.allclose(output_from_past_slice, output_from_no_past_slice, atol=1e-2)
def prepare_config_and_inputs_for_common(self):
config_and_inputs = self.prepare_config_and_inputs()
(
config,
input_ids,
attention_mask,
lm_labels,
) = config_and_inputs
inputs_dict = {
"input_ids": input_ids,
"attention_mask": attention_mask,
}
return config, inputs_dict
class ProphetNetStandaloneEncoderModelTester:
def __init__(
self,
parent,
vocab_size=99,
batch_size=13,
hidden_size=16,
encoder_seq_length=7,
decoder_seq_length=7,
# For common tests
is_training=True,
is_decoder=False,
use_attention_mask=True,
add_cross_attention=False,
use_cache=False,
use_labels=True,
decoder_start_token_id=0,
encoder_ffn_dim=32,
num_encoder_layers=4,
num_encoder_attention_heads=4,
decoder_ffn_dim=32,
num_decoder_layers=4,
num_decoder_attention_heads=4,
max_position_embeddings=30,
is_encoder_decoder=False,
pad_token_id=0,
bos_token_id=1,
eos_token_id=2,
return_dict=True,
num_buckets=32,
relative_max_distance=128,
disable_ngram_loss=False,
scope=None,
):
self.parent = parent
self.batch_size = batch_size
self.encoder_seq_length = encoder_seq_length
self.decoder_seq_length = decoder_seq_length
# For common tests
self.seq_length = self.decoder_seq_length
self.is_training = is_training
self.use_attention_mask = use_attention_mask
self.use_labels = use_labels
self.vocab_size = vocab_size
self.hidden_size = hidden_size
self.num_hidden_layers = num_decoder_layers
self.num_encoder_layers = num_encoder_layers
self.num_decoder_layers = num_decoder_layers
self.decoder_ffn_dim = decoder_ffn_dim
self.encoder_ffn_dim = encoder_ffn_dim
self.num_attention_heads = num_decoder_attention_heads
self.num_encoder_attention_heads = num_encoder_attention_heads
self.num_decoder_attention_heads = num_decoder_attention_heads
self.eos_token_id = eos_token_id
self.bos_token_id = bos_token_id
self.pad_token_id = pad_token_id
self.decoder_start_token_id = decoder_start_token_id
self.num_buckets = num_buckets
self.relative_max_distance = relative_max_distance
self.use_cache = use_cache
self.disable_ngram_loss = disable_ngram_loss
self.max_position_embeddings = max_position_embeddings
self.add_cross_attention = add_cross_attention
self.is_encoder_decoder = is_encoder_decoder
self.return_dict = return_dict
self.scope = None
self.decoder_key_length = decoder_seq_length
self.base_model_out_len = 1
self.num_hidden_states_types = 1
self.decoder_attention_idx = 1
def prepare_config_and_inputs(self):
input_ids = ids_tensor([self.batch_size, self.encoder_seq_length], self.vocab_size)
attention_mask = None
if self.use_attention_mask:
attention_mask = ids_tensor([self.batch_size, self.encoder_seq_length], vocab_size=2)
config = ProphetNetConfig(
vocab_size=self.vocab_size,
hidden_size=self.hidden_size,
num_encoder_layers=self.num_encoder_layers,
num_decoder_layers=self.num_decoder_layers,
decoder_ffn_dim=self.decoder_ffn_dim,
encoder_ffn_dim=self.encoder_ffn_dim,
num_encoder_attention_heads=self.num_encoder_attention_heads,
num_decoder_attention_heads=self.num_decoder_attention_heads,
eos_token_id=self.eos_token_id,
bos_token_id=self.bos_token_id,
use_cache=self.use_cache,
pad_token_id=self.pad_token_id,
decoder_start_token_id=self.decoder_start_token_id,
num_buckets=self.num_buckets,
relative_max_distance=self.relative_max_distance,
disable_ngram_loss=self.disable_ngram_loss,
max_position_embeddings=self.max_position_embeddings,
add_cross_attention=self.add_cross_attention,
is_encoder_decoder=self.is_encoder_decoder,
return_dict=self.return_dict,
)
return (
config,
input_ids,
attention_mask,
)
def prepare_config_and_inputs_for_common(self):
config_and_inputs = self.prepare_config_and_inputs()
(
config,
input_ids,
attention_mask,
) = config_and_inputs
inputs_dict = {
"input_ids": input_ids,
"attention_mask": attention_mask,
}
return config, inputs_dict
@require_torch
class ProphetNetModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
all_model_classes = (ProphetNetModel, ProphetNetForConditionalGeneration) if is_torch_available() else ()
all_generative_model_classes = (ProphetNetForConditionalGeneration,) if is_torch_available() else ()
test_pruning = False
test_torchscript = False
test_resize_embeddings = False
test_headmasking = False
is_encoder_decoder = True
def setUp(self):
self.model_tester = ProphetNetModelTester(self)
self.config_tester = ConfigTester(self, config_class=ProphetNetConfig)
def test_config(self):
self.config_tester.run_common_tests()
def test_model(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_model(*config_and_inputs)
def test_lm_model(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_with_lm_head(*config_and_inputs)
def test_only_decoder_causal_model(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_causal_lm_decoder(*config_and_inputs)
def test_fast_integration(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.check_fast_integration(*config_and_inputs)
def test_shared_weights(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_encoder_decoder_shared_weights(*config_and_inputs)
def test_shift_labels_via_shift_left(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.check_prepare_lm_labels_via_shift_left(*config_and_inputs)
def test_decoder_model_generate(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_generate_with_past_key_value_states(*config_and_inputs)
def test_attn_mask_model(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.check_model_with_attn_mask(*config_and_inputs)
def test_config_save(self):
config = self.model_tester.prepare_config_and_inputs()[0]
config.add_cross_attention = False
with tempfile.TemporaryDirectory() as tmp_dirname:
config.save_pretrained(tmp_dirname)
config = ProphetNetConfig.from_pretrained(tmp_dirname)
self.assertFalse(config.add_cross_attention)
    @unittest.skipIf(torch_device == "cpu", "Can't do half precision")
def test_fp16_forward(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_model_fp16_forward(*config_and_inputs)
@require_torch
class ProphetNetStandaloneDecoderModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
all_model_classes = (ProphetNetDecoder, ProphetNetForCausalLM) if is_torch_available() else ()
all_generative_model_classes = (ProphetNetForCausalLM,) if is_torch_available() else ()
test_pruning = False
test_torchscript = False
test_resize_embeddings = False
test_headmasking = False
is_encoder_decoder = False
def setUp(self):
self.model_tester = ProphetNetStandaloneDecoderModelTester(self)
self.config_tester = ConfigTester(self, config_class=ProphetNetConfig)
def test_config(self):
self.config_tester.run_common_tests()
def test_decoder_model_past(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_decoder_model_past(*config_and_inputs)
def test_decoder_model_attn_mask_past(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_decoder_model_attention_mask_past(*config_and_inputs)
@require_torch
class ProphetNetStandaloneEncoderModelTest(ModelTesterMixin, unittest.TestCase):
all_model_classes = (ProphetNetEncoder,) if is_torch_available() else ()
test_pruning = False
test_torchscript = False
test_resize_embeddings = False
test_headmasking = False
is_encoder_decoder = False
def setUp(self):
self.model_tester = ProphetNetStandaloneEncoderModelTester(self)
self.config_tester = ConfigTester(self, config_class=ProphetNetConfig)
def test_config(self):
self.config_tester.run_common_tests()
@require_torch
class ProphetNetModelIntegrationTest(unittest.TestCase):
@slow
def test_pretrained_checkpoint_hidden_states(self):
model = ProphetNetForConditionalGeneration.from_pretrained("microsoft/prophetnet-large-uncased")
model.to(torch_device)
# encoder-decoder outputs
encoder_ids = torch.tensor(
[
[
2871,
102,
2048,
3176,
2780,
1997,
2871,
26727,
2169,
2097,
12673,
1996,
8457,
2006,
2049,
8240,
2859,
2799,
1012,
2023,
6512,
2038,
2174,
13977,
2195,
25962,
1012,
102,
]
]
).to(torch_device)
decoder_prev_ids = torch.tensor([[102, 2129, 2116, 2372, 2024, 2006, 2169, 1997, 2122, 2048, 2780, 1029]]).to(
torch_device
)
output = model(
input_ids=encoder_ids,
attention_mask=None,
encoder_outputs=None,
decoder_input_ids=decoder_prev_ids,
return_dict=True,
)
        output_predicted_logits = output[0]
        expected_shape = torch.Size((1, 12, 30522))
        self.assertEqual(output_predicted_logits.shape, expected_shape)
        expected_slice = torch.tensor(
            [[[-7.6213, -7.9008, -7.9979], [-7.6834, -7.8467, -8.2187], [-7.5326, -7.4762, -8.1914]]]
        ).to(torch_device)
        self.assertTrue(torch.allclose(output_predicted_logits[:, :3, :3], expected_slice, atol=1e-4))
# encoder outputs
encoder_outputs = model.prophetnet.encoder(encoder_ids)[0]
expected_encoder_outputs_slice = torch.tensor(
[[[-0.2526, -0.1951, -0.2185], [-0.8923, 0.2992, -0.4623], [-0.4585, 0.0165, -0.6652]]]
).to(torch_device)
expected_shape_encoder = torch.Size((1, 28, 1024))
self.assertEqual(encoder_outputs.shape, expected_shape_encoder)
        self.assertTrue(torch.allclose(encoder_outputs[:, :3, :3], expected_encoder_outputs_slice, atol=1e-4))
# decoder outputs
decoder_outputs = model.prophetnet.decoder(
decoder_prev_ids, encoder_hidden_states=encoder_outputs, return_dict=True
)
predicting_streams = decoder_outputs[1].view(1, model.config.ngram, 12, -1)
predicting_streams_logits = model.lm_head(predicting_streams)
next_first_stream_logits = predicting_streams_logits[:, 0]
        self.assertTrue(torch.allclose(next_first_stream_logits[:, :3, :3], expected_slice, atol=1e-4))
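    # ProphetNet's decoder emits `ngram` prediction streams per position
    # (future n-gram prediction); the view(...) above unpacks them as
    # (batch, ngram, seq_len, hidden). Stream 0 predicts the immediate next
    # token, which is why its logits are expected to match the main-stream
    # slice checked earlier.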
@slow
def test_cnndm_inference(self):
model = ProphetNetForConditionalGeneration.from_pretrained("microsoft/prophetnet-large-uncased-cnndm")
model.config.max_length = 512
model.to(torch_device)
tokenizer = ProphetNetTokenizer.from_pretrained("microsoft/prophetnet-large-uncased-cnndm")
ARTICLE_TO_SUMMARIZE = "USTC was founded in Beijing by the Chinese Academy of Sciences (CAS) in September 1958. The Director of CAS, Mr. Guo Moruo was appointed the first president of USTC. USTC's founding mission was to develop a high-level science and technology workforce, as deemed critical for development of China's economy, defense, and science and technology education. The establishment was hailed as \"A Major Event in the History of Chinese Education and Science.\" CAS has supported USTC by combining most of its institutes with the departments of the university. USTC is listed in the top 16 national key universities, becoming the youngest national key university.".lower()
input_ids = tokenizer([ARTICLE_TO_SUMMARIZE], max_length=511, return_tensors="pt").input_ids
input_ids = input_ids.to(torch_device)
summary_ids = model.generate(
input_ids, num_beams=4, length_penalty=1.0, no_repeat_ngram_size=3, early_stopping=True
)
EXPECTED_SUMMARIZE_512 = "us ##tc was founded by the chinese academy of sciences ( cas ) in 1958 . [X_SEP] us ##tc is listed in the top 16 national key universities ."
generated_titles = [
" ".join(tokenizer.convert_ids_to_tokens(g, skip_special_tokens=True)) for g in summary_ids
]
self.assertListEqual(
[EXPECTED_SUMMARIZE_512],
generated_titles,
)
input_ids = tokenizer([ARTICLE_TO_SUMMARIZE], max_length=99, return_tensors="pt").input_ids
input_ids = input_ids.to(torch_device)
# actually 98 tokens are used. max_length=100 contains bos and eos.
summary_ids = model.generate(
input_ids, num_beams=4, length_penalty=1.0, no_repeat_ngram_size=3, early_stopping=True
)
EXPECTED_SUMMARIZE_100 = (
r"us ##tc was founded in beijing by the chinese academy of sciences ( cas ) in 1958 . [X_SEP] us ##tc "
"'"
' s founding mission was to develop a high - level science and technology workforce . [X_SEP] establishment hailed as " a major event in the history of chinese education and science "'
)
generated_titles = [
" ".join(tokenizer.convert_ids_to_tokens(g, skip_special_tokens=True)) for g in summary_ids
]
self.assertListEqual(
[EXPECTED_SUMMARIZE_100],
generated_titles,
)
@slow
def test_question_gen_inference(self):
model = ProphetNetForConditionalGeneration.from_pretrained("microsoft/prophetnet-large-uncased-squad-qg")
model.to(torch_device)
tokenizer = ProphetNetTokenizer.from_pretrained("microsoft/prophetnet-large-uncased-squad-qg")
INPUTS = [
"Bill Gates [SEP] Microsoft was founded by Bill Gates and Paul Allen on April 4, 1975.",
"1975 [SEP] Microsoft was founded by Bill Gates and Paul Allen on April 4, 1975.",
"April 4, 1975 [SEP] Microsoft was founded by Bill Gates and Paul Allen on April 4, 1975.",
]
input_ids = tokenizer(INPUTS, truncation=True, padding=True, return_tensors="pt").input_ids
input_ids = input_ids.to(torch_device)
gen_output = model.generate(input_ids, num_beams=5, early_stopping=True)
generated_questions = tokenizer.batch_decode(gen_output, skip_special_tokens=True)
EXPECTED_QUESTIONS = [
"along with paul allen, who founded microsoft?",
"what year was microsoft founded?",
"on what date was microsoft founded?",
]
self.assertListEqual(
EXPECTED_QUESTIONS,
generated_questions,
)
| 39.671441
| 695
| 0.657392
|
3802abd6becd72782d1a192202de61a6f0a56c75
| 16,888
|
py
|
Python
|
tests/test_main.py
|
ioggstream/datamodel-code-generator
|
74e9139a1d74e186a61026c323c5ee9c66413e59
|
[
"MIT"
] | null | null | null |
tests/test_main.py
|
ioggstream/datamodel-code-generator
|
74e9139a1d74e186a61026c323c5ee9c66413e59
|
[
"MIT"
] | null | null | null |
tests/test_main.py
|
ioggstream/datamodel-code-generator
|
74e9139a1d74e186a61026c323c5ee9c66413e59
|
[
"MIT"
] | null | null | null |
import shutil
from pathlib import Path
from tempfile import TemporaryDirectory
import pytest
from _pytest.capture import CaptureFixture
from _pytest.tmpdir import TempdirFactory
from freezegun import freeze_time
from datamodel_code_generator.__main__ import Exit, main
DATA_PATH: Path = Path(__file__).parent / 'data'
OPEN_API_DATA_PATH: Path = DATA_PATH / 'openapi'
JSON_SCHEMA_DATA_PATH: Path = DATA_PATH / 'jsonschema'
JSON_DATA_PATH: Path = DATA_PATH / 'json'
YAML_DATA_PATH: Path = DATA_PATH / 'yaml'
EXPECTED_MAIN_PATH = DATA_PATH / 'expected' / 'main'
TIMESTAMP = '1985-10-26T01:21:00-07:00'
@freeze_time('2019-07-26')
def test_main():
with TemporaryDirectory() as output_dir:
output_file: Path = Path(output_dir) / 'output.py'
return_code: Exit = main(
[
'--input',
str(OPEN_API_DATA_PATH / 'api.yaml'),
'--output',
str(output_file),
]
)
assert return_code == Exit.OK
assert (
output_file.read_text()
== (EXPECTED_MAIN_PATH / 'main' / 'output.py').read_text()
)
with pytest.raises(SystemExit):
main()
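# Each test drives the generator through its CLI entry point: the argv list
# handed to main() mirrors a shell invocation such as (hypothetical paths)
#   datamodel-codegen --input api.yaml --output output.py
# and freeze_time pins the timestamp embedded in the generated file header,
# keeping the output byte-for-byte comparable to the expected fixtures.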
@freeze_time('2019-07-26')
def test_main_base_class():
with TemporaryDirectory() as output_dir:
output_file: Path = Path(output_dir) / 'output.py'
return_code: Exit = main(
[
'--input',
str(OPEN_API_DATA_PATH / 'api.yaml'),
'--output',
str(output_file),
'--base-class',
'custom_module.Base',
]
)
assert return_code == Exit.OK
assert (
output_file.read_text()
== (EXPECTED_MAIN_PATH / 'main_base_class' / 'output.py').read_text()
)
with pytest.raises(SystemExit):
main()
@freeze_time('2019-07-26')
def test_target_python_version():
with TemporaryDirectory() as output_dir:
output_file: Path = Path(output_dir) / 'output.py'
return_code: Exit = main(
[
'--input',
str(OPEN_API_DATA_PATH / 'api.yaml'),
'--output',
str(output_file),
'--target-python-version',
'3.6',
]
)
assert return_code == Exit.OK
assert (
output_file.read_text()
== (EXPECTED_MAIN_PATH / 'target_python_version' / 'output.py').read_text()
)
with pytest.raises(SystemExit):
main()
@freeze_time('2019-07-26')
def test_main_autodetect():
with TemporaryDirectory() as output_dir:
output_file: Path = Path(output_dir) / 'output.py'
return_code: Exit = main(
[
'--input',
str(JSON_SCHEMA_DATA_PATH / 'person.json'),
'--output',
str(output_file),
'--input-file-type',
'auto',
]
)
assert return_code == Exit.OK
assert (
output_file.read_text()
== (EXPECTED_MAIN_PATH / 'main_autodetect' / 'output.py').read_text()
)
with pytest.raises(SystemExit):
main()
@freeze_time('2019-07-26')
def test_main_autodetect_failed():
with TemporaryDirectory() as input_dir, TemporaryDirectory() as output_dir:
input_file: Path = Path(input_dir) / 'input.yaml'
output_file: Path = Path(output_dir) / 'output.py'
input_file.write_text(':')
return_code: Exit = main(
[
'--input',
str(input_file),
'--output',
str(output_file),
'--input-file-type',
'auto',
]
)
assert return_code == Exit.ERROR
with pytest.raises(SystemExit):
main()
@freeze_time('2019-07-26')
def test_main_jsonschema():
with TemporaryDirectory() as output_dir:
output_file: Path = Path(output_dir) / 'output.py'
return_code: Exit = main(
[
'--input',
str(JSON_SCHEMA_DATA_PATH / 'person.json'),
'--output',
str(output_file),
'--input-file-type',
'jsonschema',
]
)
assert return_code == Exit.OK
assert (
output_file.read_text()
== (EXPECTED_MAIN_PATH / 'main_jsonschema' / 'output.py').read_text()
)
with pytest.raises(SystemExit):
main()
@freeze_time('2019-07-26')
def test_main_jsonschema_nested_deep():
import os
os.chdir(DATA_PATH / 'jsonschema')
with TemporaryDirectory() as output_dir:
output_init_file: Path = Path(output_dir) / '__init__.py'
output_nested_file: Path = Path(output_dir) / 'nested/deep.py'
output_empty_parent_nested_file: Path = Path(
output_dir
) / 'empty_parent/nested/deep.py'
return_code: Exit = main(
[
'--input',
str(JSON_SCHEMA_DATA_PATH / 'nested_person.json'),
'--output',
str(output_dir),
'--input-file-type',
'jsonschema',
]
)
assert return_code == Exit.OK
assert (
output_init_file.read_text()
== (
EXPECTED_MAIN_PATH / 'main_jsonschema_nested_deep' / '__init__.py'
).read_text()
)
assert (
output_nested_file.read_text()
== (
EXPECTED_MAIN_PATH
/ 'main_jsonschema_nested_deep'
/ 'nested'
/ 'deep.py'
).read_text()
)
assert (
output_empty_parent_nested_file.read_text()
== (
EXPECTED_MAIN_PATH
/ 'main_jsonschema_nested_deep'
/ 'empty_parent'
/ 'nested'
/ 'deep.py'
).read_text()
)
with pytest.raises(SystemExit):
main()
@freeze_time('2019-07-26')
def test_main_json():
with TemporaryDirectory() as output_dir:
output_file: Path = Path(output_dir) / 'output.py'
return_code: Exit = main(
[
'--input',
str(JSON_DATA_PATH / 'pet.json'),
'--output',
str(output_file),
'--input-file-type',
'json',
]
)
assert return_code == Exit.OK
assert (
output_file.read_text()
== (EXPECTED_MAIN_PATH / 'main_json' / 'output.py').read_text()
)
with pytest.raises(SystemExit):
main()
@freeze_time('2019-07-26')
def test_space_and_special_characters_json():
with TemporaryDirectory() as output_dir:
output_file: Path = Path(output_dir) / 'output.py'
return_code: Exit = main(
[
'--input',
str(JSON_DATA_PATH / 'space_and_special_characters.json'),
'--output',
str(output_file),
'--input-file-type',
'json',
]
)
assert return_code == Exit.OK
assert (
output_file.read_text()
== (
EXPECTED_MAIN_PATH / 'space_and_special_characters' / 'output.py'
).read_text()
)
with pytest.raises(SystemExit):
main()
@freeze_time('2019-07-26')
def test_main_json_failed():
with TemporaryDirectory() as output_dir:
output_file: Path = Path(output_dir) / 'output.py'
return_code: Exit = main(
[
'--input',
str(JSON_DATA_PATH / 'broken.json'),
'--output',
str(output_file),
'--input-file-type',
'json',
]
)
assert return_code == Exit.ERROR
with pytest.raises(SystemExit):
main()
@freeze_time('2019-07-26')
def test_main_yaml():
with TemporaryDirectory() as output_dir:
output_file: Path = Path(output_dir) / 'output.py'
return_code: Exit = main(
[
'--input',
str(YAML_DATA_PATH / 'pet.yaml'),
'--output',
str(output_file),
'--input-file-type',
'yaml',
]
)
assert return_code == Exit.OK
assert (
output_file.read_text()
== (EXPECTED_MAIN_PATH / 'main_yaml' / 'output.py').read_text()
)
with pytest.raises(SystemExit):
main()
def test_main_modular(tmpdir_factory: TempdirFactory) -> None:
"""Test main function on modular file."""
output_directory = Path(tmpdir_factory.mktemp('output'))
input_filename = OPEN_API_DATA_PATH / 'modular.yaml'
output_path = output_directory / 'model'
with freeze_time(TIMESTAMP):
main(['--input', str(input_filename), '--output', str(output_path)])
main_modular_dir = EXPECTED_MAIN_PATH / 'main_modular'
for path in main_modular_dir.rglob('*.py'):
result = output_path.joinpath(path.relative_to(main_modular_dir)).read_text()
assert result == path.read_text()
def test_main_modular_no_file() -> None:
"""Test main function on modular file with no output name."""
input_filename = OPEN_API_DATA_PATH / 'modular.yaml'
assert main(['--input', str(input_filename)]) == Exit.ERROR
def test_main_modular_filename(tmpdir_factory: TempdirFactory) -> None:
"""Test main function on modular file with filename."""
output_directory = Path(tmpdir_factory.mktemp('output'))
input_filename = OPEN_API_DATA_PATH / 'modular.yaml'
output_filename = output_directory / 'model.py'
assert (
main(['--input', str(input_filename), '--output', str(output_filename)])
== Exit.ERROR
)
def test_main_no_file(capsys: CaptureFixture) -> None:
"""Test main function on non-modular file with no output name."""
input_filename = OPEN_API_DATA_PATH / 'api.yaml'
with freeze_time(TIMESTAMP):
main(['--input', str(input_filename)])
captured = capsys.readouterr()
assert (
captured.out == (EXPECTED_MAIN_PATH / 'main_no_file' / 'output.py').read_text()
)
assert not captured.err
def test_main_custom_template_dir(capsys: CaptureFixture) -> None:
"""Test main function with custom template directory."""
input_filename = OPEN_API_DATA_PATH / 'api.yaml'
custom_template_dir = DATA_PATH / 'templates'
extra_template_data = OPEN_API_DATA_PATH / 'extra_data.json'
with freeze_time(TIMESTAMP):
main(
[
'--input',
str(input_filename),
'--custom-template-dir',
str(custom_template_dir),
'--extra-template-data',
str(extra_template_data),
]
)
captured = capsys.readouterr()
assert (
captured.out
== (EXPECTED_MAIN_PATH / 'main_custom_template_dir' / 'output.py').read_text()
)
assert not captured.err
@freeze_time('2019-07-26')
def test_pyproject():
with TemporaryDirectory() as output_dir:
output_dir = Path(output_dir)
pyproject_toml = Path(DATA_PATH) / "project" / "pyproject.toml"
shutil.copy(pyproject_toml, output_dir)
output_file: Path = output_dir / 'output.py'
return_code: Exit = main(
[
'--input',
str(OPEN_API_DATA_PATH / 'api.yaml'),
'--output',
str(output_file),
]
)
assert return_code == Exit.OK
assert (
output_file.read_text()
== (EXPECTED_MAIN_PATH / 'pyproject' / 'output.py').read_text()
)
with pytest.raises(SystemExit):
main()
@freeze_time('2019-07-26')
def test_validation():
with TemporaryDirectory() as output_dir:
output_file: Path = Path(output_dir) / 'output.py'
return_code: Exit = main(
[
'--input',
str(OPEN_API_DATA_PATH / 'api.yaml'),
'--output',
str(output_file),
'--validation',
]
)
assert return_code == Exit.OK
assert (
output_file.read_text()
== (EXPECTED_MAIN_PATH / 'validation' / 'output.py').read_text()
)
with pytest.raises(SystemExit):
main()
@freeze_time('2019-07-26')
def test_validation_failed():
with TemporaryDirectory() as output_dir:
output_file: Path = Path(output_dir) / 'output.py'
assert (
main(
[
'--input',
str(OPEN_API_DATA_PATH / 'invalid.yaml'),
'--output',
str(output_file),
'--input-file-type',
'openapi',
'--validation',
]
)
== Exit.ERROR
)
@freeze_time('2019-07-26')
def test_main_with_field_constraints():
with TemporaryDirectory() as output_dir:
output_file: Path = Path(output_dir) / 'output.py'
return_code: Exit = main(
[
'--input',
str(OPEN_API_DATA_PATH / 'api_constrained.yaml'),
'--output',
str(output_file),
'--field-constraints',
]
)
assert return_code == Exit.OK
assert (
output_file.read_text()
== (
EXPECTED_MAIN_PATH / 'main_with_field_constraints' / 'output.py'
).read_text()
)
with pytest.raises(SystemExit):
main()
@freeze_time('2019-07-26')
def test_main_without_field_constraints():
with TemporaryDirectory() as output_dir:
output_file: Path = Path(output_dir) / 'output.py'
return_code: Exit = main(
[
'--input',
str(OPEN_API_DATA_PATH / 'api_constrained.yaml'),
'--output',
str(output_file),
]
)
assert return_code == Exit.OK
assert (
output_file.read_text()
== (
EXPECTED_MAIN_PATH / 'main_without_field_constraints' / 'output.py'
).read_text()
)
with pytest.raises(SystemExit):
main()
@freeze_time('2019-07-26')
def test_main_with_aliases():
with TemporaryDirectory() as output_dir:
output_file: Path = Path(output_dir) / 'output.py'
return_code: Exit = main(
[
'--input',
str(OPEN_API_DATA_PATH / 'api.yaml'),
'--aliases',
str(OPEN_API_DATA_PATH / 'aliases.json'),
'--output',
str(output_file),
]
)
assert return_code == Exit.OK
assert (
output_file.read_text()
== (EXPECTED_MAIN_PATH / 'main_with_aliases' / 'output.py').read_text()
)
with pytest.raises(SystemExit):
main()
def test_main_with_bad_aliases():
with TemporaryDirectory() as output_dir:
output_file: Path = Path(output_dir) / 'output.py'
return_code: Exit = main(
[
'--input',
str(OPEN_API_DATA_PATH / 'api.yaml'),
'--aliases',
str(OPEN_API_DATA_PATH / 'not.json'),
'--output',
str(output_file),
]
)
assert return_code == Exit.ERROR
with pytest.raises(SystemExit):
main()
def test_main_with_more_bad_aliases():
with TemporaryDirectory() as output_dir:
output_file: Path = Path(output_dir) / 'output.py'
return_code: Exit = main(
[
'--input',
str(OPEN_API_DATA_PATH / 'api.yaml'),
'--aliases',
str(OPEN_API_DATA_PATH / 'list.json'),
'--output',
str(output_file),
]
)
assert return_code == Exit.ERROR
with pytest.raises(SystemExit):
main()
def test_main_with_bad_extra_data():
with TemporaryDirectory() as output_dir:
output_file: Path = Path(output_dir) / 'output.py'
return_code: Exit = main(
[
'--input',
str(OPEN_API_DATA_PATH / 'api.yaml'),
'--extra-template-data',
str(OPEN_API_DATA_PATH / 'not.json'),
'--output',
str(output_file),
]
)
assert return_code == Exit.ERROR
with pytest.raises(SystemExit):
main()
| 29.017182
| 87
| 0.532627
|
80c952d4cabd38050c5325687fec001b0b1da706
| 1,059
|
py
|
Python
|
env/lib/python3.8/site-packages/plotly/validators/layout/xaxis/rangeslider/_yaxis.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 11,750
|
2015-10-12T07:03:39.000Z
|
2022-03-31T20:43:15.000Z
|
env/lib/python3.8/site-packages/plotly/validators/layout/xaxis/rangeslider/_yaxis.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 2,951
|
2015-10-12T00:41:25.000Z
|
2022-03-31T22:19:26.000Z
|
env/lib/python3.8/site-packages/plotly/validators/layout/xaxis/rangeslider/_yaxis.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 2,623
|
2015-10-15T14:40:27.000Z
|
2022-03-28T16:05:50.000Z
|
import _plotly_utils.basevalidators
class YaxisValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(
self, plotly_name="yaxis", parent_name="layout.xaxis.rangeslider", **kwargs
):
super(YaxisValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "YAxis"),
data_docs=kwargs.pop(
"data_docs",
"""
range
Sets the range of this axis for the
rangeslider.
rangemode
                Determines whether or not the range of this
                axis in the rangeslider uses the same value as
                in the main plot when zooming in/out. If
"auto", the autorange will be used. If "fixed",
the `range` is used. If "match", the current
range of the corresponding y-axis on the main
subplot is used.
""",
),
**kwargs
)
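# Usage sketch (not part of the generated module): compound validators like
# this are normally driven by plotly's internal machinery, roughly
#   v = YaxisValidator()
#   yaxis = v.validate_coerce({"rangemode": "auto"})
# where validate_coerce turns a plain dict into the corresponding
# layout.xaxis.rangeslider.YAxis graph object.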
| 35.3
| 83
| 0.556185
|
abd06ee2acfc8408c3945ea78bb6d4210cf8c97b
| 10,791
|
py
|
Python
|
eleanor/update.py
|
orionlee/eleanor
|
bd5b8cdc6d80bda9ebab2b464f07aab24bc93f7f
|
[
"MIT"
] | null | null | null |
eleanor/update.py
|
orionlee/eleanor
|
bd5b8cdc6d80bda9ebab2b464f07aab24bc93f7f
|
[
"MIT"
] | null | null | null |
eleanor/update.py
|
orionlee/eleanor
|
bd5b8cdc6d80bda9ebab2b464f07aab24bc93f7f
|
[
"MIT"
] | null | null | null |
import os
from urllib.request import urlopen
from datetime import datetime
import math
from astropy.coordinates import SkyCoord
from astropy import units as u
from astroquery.mast import Tesscut
from astropy.io import fits
import numpy as np
import sys
import requests
from bs4 import BeautifulSoup
eleanorpath = os.path.join(os.path.expanduser('~'), '.eleanor')
if not os.path.exists(eleanorpath):
try:
os.mkdir(eleanorpath)
except OSError:
eleanorpath = os.path.dirname(__file__)
def hmsm_to_days(hour=0, min=0, sec=0, micro=0):
    """Convert hours, minutes, seconds and microseconds to a fraction of a day."""
days = sec + (micro / 1.e6)
days = min + (days / 60.)
days = hour + (days / 60.)
return days / 24.
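# Worked example: hmsm_to_days(hour=12) returns 0.5, and
# hmsm_to_days(hour=6, min=30) returns 6.5 / 24 ~ 0.2708.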
def date_to_jd(year, month, day):
    """Convert a calendar date (day may be fractional) to Julian Date."""
if month == 1 or month == 2:
yearp = year - 1
monthp = month + 12
else:
yearp = year
monthp = month
# this checks where we are in relation to October 15, 1582, the beginning
# of the Gregorian calendar.
if ((year < 1582) or (year == 1582 and month < 10) or
(year == 1582 and month == 10 and day < 15)):
# before start of Gregorian calendar
B = 0
else:
# after start of Gregorian calendar
A = math.trunc(yearp / 100.)
B = 2 - A + math.trunc(A / 4.)
if yearp < 0:
C = math.trunc((365.25 * yearp) - 0.75)
else:
C = math.trunc(365.25 * yearp)
D = math.trunc(30.6001 * (monthp + 1))
# including leap second correction
jd = B + C + D + day + 1720994.5 + 0.0008
return jd
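# Worked example: date_to_jd(2000, 1, 1.5) returns 2451545.0008 -- the
# standard Julian Date 2451545.0 for 2000-01-01 12:00 plus the fixed
# 0.0008 d offset applied above.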
def listFD(url, ext=''):
    """Return the URLs of all links on the listing page at `url` ending in `ext`."""
page = requests.get(url).text
soup = BeautifulSoup(page, 'html.parser')
return [url + node.get('href') for node in soup.find_all('a') if
node.get('href').endswith(ext)]
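# Usage sketch: listFD(url, ext='_cbv.fits') returns the absolute URL of every
# <a href> on the listing page that ends in '_cbv.fits'; get_cbvs() below uses
# this to discover the per-camera/CCD CBV files on the archive.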
__all__ = ['Update', 'update_all']
def update_all():
sector = 1
good = 1
while good:
try:
Update(sector=sector)
except AttributeError:
good = 0
sector += 1
class Update(object):
def __init__(self, sector=None):
if sector is None:
print('Please pass a sector into eleanor.Update().')
return
self.sector = sector
self.metadata_path = os.path.join(eleanorpath, 'metadata/s{0:04d}'.format(self.sector))
lastfile = 'cbv_components_s{0:04d}_0004_0004.txt'.format(self.sector)
# Checks to see if directory contains all necessary files first
if os.path.isdir(self.metadata_path):
if lastfile in os.listdir(self.metadata_path):
print('This directory already exists!')
return
self.north_coords = SkyCoord('16:35:50.667 +63:54:39.87',
unit=(u.hourangle, u.deg))
self.south_coords = SkyCoord('04:35:50.330 -64:01:37.33',
unit=(u.hourangle, u.deg))
if self.sector < 14 or self.sector > 26:
try:
manifest = Tesscut.download_cutouts(self.south_coords, 31, sector=self.sector)
success = 1
            except Exception:
print("This sector isn't available yet.")
return
else:
try:
manifest = Tesscut.download_cutouts(self.north_coords, 31, sector=self.sector)
success = 1
            except Exception:
print("This sector isn't available yet.")
return
if success == 1:
            if not os.path.isdir(self.metadata_path):
                os.mkdir(self.metadata_path)
self.cutout = fits.open(manifest['Local Path'][0])
print('This is the first light curve you have made for this sector. '
'Getting eleanor metadata products for '
'Sector {0:2d}...'.format(self.sector))
print('This will only take a minute, and only needs to be done once. '
'Any other light curves you make in this sector will be faster.')
self.get_target()
print('Target Acquired')
self.get_cadences()
print('Cadences Calculated')
self.get_quality()
print('Quality Flags Assured')
self.get_cbvs()
print('CBVs Made')
print('Success! Sector {:2d} now available.'.format(self.sector))
os.remove(manifest['Local Path'][0])
self.try_next_sector()
def get_cbvs(self):
if self.sector <= 6:
year = 2018
elif self.sector <= 20:
year = 2019
else:
year = 2020
url = 'https://archive.stsci.edu/missions/tess/ffi/s{0:04d}/{1}/'.format(self.sector, year)
directs = []
for file in listFD(url):
directs.append(file)
directs = np.sort(directs)[1::]
subdirects = []
for file in listFD(directs[0]):
subdirects.append(file)
subdirects = np.sort(subdirects)[1:-4]
for i in range(len(subdirects)):
file = listFD(subdirects[i], ext='_cbv.fits')[0]
os.system('curl -O -L {}'.format(file))
time = self.cutout[1].data['TIME'] - self.cutout[1].data['TIMECORR']
files = os.listdir('.')
files = [i for i in files if i.endswith('_cbv.fits') and
's{0:04d}'.format(self.sector) in i]
for c in range(len(files)):
cbv = fits.open(files[c])
camera = cbv[1].header['CAMERA']
ccd = cbv[1].header['CCD']
cbv_time = cbv[1].data['Time']
new_fn = eleanorpath + '/metadata/s{0:04d}/cbv_components_s{0:04d}_{1:04d}_{2:04d}.txt'.format(self.sector, camera, ccd)
convolved = np.zeros((len(time), 16))
for i in range(len(time)):
g = np.argmin(np.abs(time[i] - cbv_time))
for j in range(16):
index = 'VECTOR_{0}'.format(j+1)
if self.sector < 27:
cads = np.arange(g-7, g+8, 1)
else:
# XXX: need to test when TESSCut becomes available
cads = np.arange(g-2, g+3, 1)
convolved[i, j] = np.mean(cbv[1].data[index][cads])
np.savetxt(new_fn, convolved)
cbv.close()
files = [i for i in files if i.endswith('_cbv.fits') and
's{0:04d}'.format(self.sector) in i]
for c in range(len(files)):
os.remove(files[c])
    def try_next_sector(self):
        """Record this sector in maxsector.py if it is the highest seen so far."""
        codepath = os.path.dirname(__file__)
        # use context managers so the file handles are always closed
        with open(codepath + '/maxsector.py', 'r') as f1:
            oldmax = float(f1.readline().split('=')[-1])
        if self.sector > oldmax:
            with open(codepath + '/maxsector.py', 'w') as f:
                f.write('maxsector = {:2d}'.format(self.sector))
def get_target(self):
filelist = urlopen('https://archive.stsci.edu/missions/tess/download_scripts/sector/tesscurl_sector_{:d}_lc.sh'.
format(self.sector))
for line in filelist:
if len(str(line)) > 30:
import shutil
os.system(str(line)[2:-3])
fn = str(line)[2:-3].split()[5]
shutil.move(fn, eleanorpath + '/metadata/s{0:04d}/target_s{0:04d}.fits'.format(self.sector))
break
return
def get_cadences(self):
if self.sector < 27:
# these come from the first FFI cadence of S7, in particular
# Camera 1, CCD 1 for the t0. The t0s vary by ~1 minute because of
# barycentric corrections on different cameras
index_zeropoint = 12680
index_t0 = 1491.625533688852
else:
# first FFI cadence of S27 from Cam 1, CCD 1
index_zeropoint = 116470
index_t0 = 2036.283350837239
times = np.array([], dtype=int)
filelist = urlopen('https://archive.stsci.edu/missions/tess/download_scripts/sector/tesscurl_sector_{:d}_ffic.sh'.
format(self.sector))
for line in filelist:
if len(str(line)) > 30:
times = np.append(times, int(str(line).split('tess')[1][0:13]))
times = np.sort(np.unique(times))
outarr = np.zeros_like(times)
for i in range(len(times)):
date = datetime.strptime(str(times[i]), '%Y%j%H%M%S')
days = date.day + hmsm_to_days(date.hour, date.minute,
date.second, date.microsecond)
tjd = date_to_jd(date.year, date.month, days) - 2457000
if self.sector < 27:
cad = (tjd - index_t0)/(30./1440.)
else:
cad = (tjd - index_t0)/(10./1440.)
outarr[i] = (int(np.round(cad))+index_zeropoint)
np.savetxt(eleanorpath + '/metadata/s{0:04d}/cadences_s{0:04d}.txt'.format(self.sector), outarr, fmt='%i')
return
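    # Worked example for the cadence arithmetic above: at the 30-minute
    # cadence (30/1440 of a day), a frame one day after index_t0 gives
    # cad = 1 / (30/1440) = 48, i.e. cadence number index_zeropoint + 48.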
def get_quality(self):
""" Uses the quality flags in a 2-minute target to create quality flags
in the postcards.
"""
ffi_time = self.cutout[1].data['TIME'] - self.cutout[1].data['TIMECORR']
shortCad_fn = eleanorpath + '/metadata/s{0:04d}/target_s{0:04d}.fits'.format(self.sector)
# Binary string for values which apply to the FFIs
if self.sector > 26:
ffi_apply = int('100000000010101111', 2)
else:
ffi_apply = int('100010101111', 2)
# Obtains information for 2-minute target
twoMin = fits.open(shortCad_fn)
twoMinTime = twoMin[1].data['TIME']-twoMin[1].data['TIMECORR']
finite = np.isfinite(twoMinTime)
twoMinQual = twoMin[1].data['QUALITY']
twoMinTime = twoMinTime[finite]
twoMinQual = twoMinQual[finite]
convolve_ffi = []
for i in range(len(ffi_time)):
where = np.where(np.abs(ffi_time[i] - twoMinTime) == np.min(np.abs(ffi_time[i] - twoMinTime)))[0][0]
sflux = np.sum(self.cutout[1].data['FLUX'][i])
nodata = 0
if sflux == 0:
nodata = 131072
if (ffi_time[i] > 1420) and (ffi_time[i] < 1424):
nodata = 131072
if self.sector < 27:
v = np.bitwise_or.reduce(twoMinQual[where-7:where+8])
else:
# XXX: need to test when TESSCut is available in S27
v = np.bitwise_or.reduce(twoMinQual[where - 2:where + 3])
convolve_ffi.append(np.bitwise_or(v, nodata))
convolve_ffi = np.array(convolve_ffi)
flags = np.bitwise_and(convolve_ffi, ffi_apply)
np.savetxt(eleanorpath + '/metadata/s{0:04d}/quality_s{0:04d}.txt'.format(self.sector), flags, fmt='%i')
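        # The bitwise_and with ffi_apply keeps only the quality bits relevant
        # to FFIs; e.g. np.bitwise_and(0b1111, 0b0101) == 0b0101 -- bits
        # outside the mask are zeroed regardless of the 2-minute flags.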
| 35.264706
| 132
| 0.549995
|
d4da447061cace964384dbef3d7c786352fd9d2f
| 1,227
|
py
|
Python
|
app/mod_auth/controllers.py
|
kelvinchot/FaceRecognitionBackend
|
c83f8959b2792511174cfa233d8c2b852f26db74
|
[
"MIT"
] | 2
|
2019-11-26T17:56:32.000Z
|
2019-12-07T08:09:27.000Z
|
app/mod_auth/controllers.py
|
kelvinchot/FaceRecognitionBackend
|
c83f8959b2792511174cfa233d8c2b852f26db74
|
[
"MIT"
] | null | null | null |
app/mod_auth/controllers.py
|
kelvinchot/FaceRecognitionBackend
|
c83f8959b2792511174cfa233d8c2b852f26db74
|
[
"MIT"
] | 1
|
2020-10-27T07:20:50.000Z
|
2020-10-27T07:20:50.000Z
|
# Import flask dependencies
from flask import Blueprint, request, render_template, \
flash, g, session, redirect, url_for
# Import password / encryption helper tools
from werkzeug.security import check_password_hash, generate_password_hash
# Import the database object from the main app module
from app import db
# Import module forms
from app.mod_auth.forms import LoginForm
# Import module models (i.e. User)
from app.mod_auth.models import User
# Define the blueprint: 'auth', set its url prefix: app.url/auth
mod_auth = Blueprint('auth', __name__, url_prefix='/auth')
# Set the route and accepted methods
@mod_auth.route('/signin/', methods=['GET', 'POST'])
def signin():
# If sign in form is submitted
form = LoginForm(request.form)
# Verify the sign in form
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user and check_password_hash(user.password, form.password.data):
session['user_id'] = user.id
flash('Welcome %s' % user.name)
return redirect(url_for('auth.home'))
flash('Wrong email or password', 'error-message')
return render_template("auth/signin.html", form=form)
| 29.214286
| 75
| 0.699267
|
3f5f874b8758e7ba20a2c53d4b42214e5eb72219
| 9,066
|
py
|
Python
|
tests/test_unit_hashlist.py
|
andrelucas/hsync
|
91f207c8fc419c773555bec355d0b0da35061044
|
[
"BSD-3-Clause"
] | 5
|
2015-03-23T23:17:42.000Z
|
2017-03-31T16:14:29.000Z
|
tests/test_unit_hashlist.py
|
andrelucas/hsync
|
91f207c8fc419c773555bec355d0b0da35061044
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_unit_hashlist.py
|
andrelucas/hsync
|
91f207c8fc419c773555bec355d0b0da35061044
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# Copyright (c) 2015, Andre Lucas
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import print_function
import inspect
import shutil
import unittest
from hsync.exceptions import *
from hsync.filehash import *
from hsync.hashlist import *
from hsync.hashlist_sqlite import *
from hsync.idmapper import *
class HashListTestCase(unittest.TestCase):
me = inspect.getfile(inspect.currentframe())
topdir = os.path.join(os.path.dirname(me), 'test')
mapper = UidGidMapper()
all_impl = HashList, SqliteHashList
@classmethod
def setUpClass(self):
self.user = self.mapper.get_name_for_uid(os.getuid())
self.group = self.mapper.get_group_for_gid(os.getgid())
def tearDown(self):
if hasattr(self, 'tmp') and self.tmp:
shutil.rmtree(self.tmp, True)
def test_init(self):
'''Object creation tests'''
for T in self.all_impl:
log.warning("XXX T %s", T)
hl = T()
self.assertIsNotNone(hl, "Non-null object returned")
def test_append(self):
'''Object can be appended'''
for T in self.all_impl:
hl = T()
fh = FileHash.init_from_string("0 100644 %s %s 0 0 test" %
(self.user, self.group))
# This should not raise.
hl.append(fh)
# Attempting to append a non-FileHash should raise.
with self.assertRaises(NotAFileHashError):
hl.append(None)
with self.assertRaises(NotAFileHashError):
hl.append("Nope")
def test_duplicate_raise(self):
'''Check exceptions are properly raised'''
for T in self.all_impl:
hl = T(raise_on_duplicates=True)
fh = FileHash.init_from_string("0 100644 %s %s 0 0 test" %
(self.user, self.group))
# This should not raise.
hl.append(fh)
self.assertEqual(len(hl), 1)
# Duplicate should raise.
with self.assertRaises(DuplicateEntryInHashListError):
hl.append(fh)
            # With raise_on_duplicates=False, duplicates should be accepted silently.
hl = T(raise_on_duplicates=False)
fh = FileHash.init_from_string("0 100644 %s %s 0 0 test" %
(self.user, self.group))
# This should not raise.
hl.append(fh)
# Duplicate should not raise.
hl.append(fh)
self.assertEqual(len(hl), 2)
def test_list_iterator(self):
'''Check we can iterate over the list properly'''
for T in self.all_impl:
hl = T()
fhlist = []
pfx = "0 100644 %s %s 0 0 test" % (self.user, self.group)
for n in xrange(1000):
fh = FileHash.init_from_string(pfx + '%0.3i' % n)
fhlist.append(fh)
hl.extend(fhlist)
self.assertEqual(len(hl), len(fhlist))
for n, fh in enumerate(hl):
self.assertEqual(fh, fhlist[n])
def test_list_indexing(self):
'''Check we can index the list properly'''
for T in self.all_impl:
hl = T()
fhlist = []
pfx = "0 100644 %s %s 0 0 test" % (self.user, self.group)
for n in xrange(1000):
fh = FileHash.init_from_string(pfx + '%0.3i' % n)
fhlist.append(fh)
hl.extend(fhlist)
self.assertEqual(len(hl), len(fhlist))
for n in xrange(1000):
self.assertEqual(hl[n], fhlist[n])
class HashDictTestCase(unittest.TestCase):
me = inspect.getfile(inspect.currentframe())
topdir = os.path.join(os.path.dirname(me), 'test')
mapper = UidGidMapper()
all_impl = HashList, SqliteHashList
@classmethod
def setUpClass(self):
self.user = self.mapper.get_name_for_uid(os.getuid())
self.group = self.mapper.get_group_for_gid(os.getgid())
def tearDown(self):
if hasattr(self, 'tmp') and self.tmp:
shutil.rmtree(self.tmp, True)
def test_init(self):
'''Object creation tests'''
with self.assertRaises(InitialiserNotAHashListError):
HashDict()
for T in self.all_impl:
hl = T()
HashDict(hl)
def test_lookup(self):
'''Check objects can be looked up'''
for T in self.all_impl:
hl = T()
fh = FileHash.init_from_string("0 100644 %s %s 0 0 test" %
(self.user, self.group))
# This should not raise.
hl.append(fh)
hd = HashDict(hl)
nfh = hd[fh.fpath]
self.assertIsInstance(nfh, FileHash, 'HashDict returns FileHash')
self.assertEqual(fh, nfh, 'HashDict returns same FileHash')
def test_multi_lookup(self):
'''Check we can index the dict properly'''
for T in self.all_impl:
hl = T()
fhlist = []
pfx = "0 100644 %s %s 0 0 test" % (self.user, self.group)
for n in xrange(1000):
fh = FileHash.init_from_string(pfx + '%0.3i' % n)
fhlist.append(fh)
hl.extend(fhlist)
self.assertEqual(len(hl), len(fhlist))
hd = HashDict(hl)
for fh in fhlist:
nfh = hd[fh.fpath]
self.assertEqual(fh, nfh, 'Path-based lookup works')
def test_iter(self):
'''Check we can index the dict properly'''
for T in self.all_impl:
hl = T()
fhlist = []
pfx = "0 100644 %s %s 0 0 test" % (self.user, self.group)
for n in xrange(1000):
fh = FileHash.init_from_string(pfx + '%0.3i' % n)
fhlist.append(fh)
hl.extend(fhlist)
self.assertEqual(len(hl), len(fhlist))
hd = HashDict(hl)
# Could use enumerate() below, but it makes a mess of the already-
# messy generator expression used to get sorted keys.
curfile = 0
for k, v in ((k, hd[k]) for k in sorted(hd.iterkeys())):
self.assertIsInstance(k, str, 'Key is correct type')
self.assertIsInstance(v, FileHash, 'Value is correct type')
self.assertEqual(v.fpath, 'test%0.3i' % curfile,
'Correct file returned')
curfile += 1
print(k, v)
def test_sort(self):
'''Check the keys are in path order'''
for T in self.all_impl:
pfx = "0 100644 %s %s 0 0 " % (self.user, self.group)
pathlist = [
['a', 'a/1', 'a/2', 'b'],
['b', 'a/1', 'a/2', 'a', 'c'],
['z', 'y', 'x', 'w'],
]
for paths in pathlist:
hl = T()
sorted_paths = sorted(paths)
fhlist = []
for f in paths:
fh = FileHash.init_from_string(pfx + f)
fhlist.append(fh)
hl.extend(fhlist)
fd = HashDict(hl)
for n, (k, fh) in enumerate(fd.iteritems()):
# The dict keys are the path.
self.assertEqual(k, sorted_paths[n])
# Check the object as well, for good measure.
self.assertEqual(fh.fpath, sorted_paths[n])
avg_line_length: 34.603053 | max_line_length: 78 | alphanum_fraction: 0.561769

hexsha: 332aff550905f023565c7ba112e205772380baaf | size: 1808 | ext: py | lang: Python
repo_path: src/cmdline/config.py | repo_name: rca/cmdline | repo_head_hexsha: ecc42b1efe98528d4b4a73b42199852870371bdd | licenses: ["Apache-2.0"] (identical across the stars/issues/forks fields)
max_stars_count: null | max_issues_count: 3 (2016-08-30T09:45:59.000Z .. 2020-03-31T00:51:16.000Z) | max_forks_count: 2 (2016-07-02T19:19:04.000Z .. 2018-01-24T03:32:30.000Z)
import inspect
import os
import sys
CONFIG_ROOT = 'CMDLINE_CONFIG_ROOT'
def find_config_root(path=sys.argv[0]):
"""
Finds config root relative to the given file path
"""
dirname = os.path.dirname(path)
lastdirname = None
while dirname != lastdirname:
config_root = os.path.join(dirname, 'config')
if os.path.exists(config_root):
return config_root
lastdirname, dirname = dirname, os.path.dirname(dirname)
def get_config_paths(filename=None, reversed=False):
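    """
    Build the list of candidate config paths: the install prefix data dir,
    <prefix>/etc/<script_name>, ~/.<script_name>, and the CMDLINE_CONFIG_ROOT
    override when set; reversed=True flips the order when the override is set.
    """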
config_paths = []
script_name = os.path.basename(sys.argv[0])
package = inspect.stack()[-2].frame.f_globals['__package__']
data_path = os.path.join('cmdline', package)
config_root = os.environ.get(CONFIG_ROOT)
if config_root:
if not os.path.exists(config_root):
raise OSError('{}={} does not exist'.format(CONFIG_ROOT, config_root))
config_locations = (
config_root,
)
else:
config_locations = ()
# handle debian/ubuntu strangeness where `pip install` will install
# to /usr/local, yet sys.prefix is /usr
prefix = sys.prefix
if not os.path.exists(os.path.join(prefix, data_path)):
_prefix = os.path.join(prefix, 'local')
if os.path.exists(os.path.join(_prefix, data_path)):
prefix = _prefix
for dirpath in (
os.path.join(prefix, data_path),
os.path.join(prefix, 'etc', script_name),
os.path.expanduser('~/.{}'.format(script_name)),
) + config_locations:
full_path = dirpath
if filename:
full_path = os.path.join(full_path, filename)
config_paths.append(full_path)
if config_root and reversed:
return config_paths[-1::-1]
return config_paths
avg_line_length: 27.393939 | max_line_length: 82 | alphanum_fraction: 0.63219

hexsha: 8a07da81d0b4847561b64b3ec402578ac9525ef0 | size: 1490 | ext: py | lang: Python
repo_path: openfe/tests/dev/serialization_test_templates.py | repo_name: OpenFreeEnergy/openfe | repo_head_hexsha: f2423ed2e4444a0824ff9b560e23b420c4997c6f | licenses: ["MIT"] (identical across the stars/issues/forks fields)
max_stars_count: 14 (2022-01-24T22:01:19.000Z .. 2022-03-31T04:58:35.000Z) | max_issues_count: 109 (2022-01-24T18:57:05.000Z .. 2022-03-31T20:13:07.000Z) | max_forks_count: 4 (2022-01-24T18:45:54.000Z .. 2022-02-21T06:28:24.000Z)
#!/usr/bin/env python
# This script creates several files used in testing setup serialization:
#
# * openfe/tests/data/multi_molecule.sdf
# * openfe/tests/data/serialization/ethane_template.sdf
# * openfe/tests/data/serialization/network_template.graphml
#
# The two serialization templates need manual editing to replace the current
# version of gufe with:
# {GUFE_VERSION}
from rdkit import Chem
from openfe.setup import SmallMoleculeComponent, LigandAtomMapping, Network
# multi_molecule.sdf
mol1 = Chem.MolFromSmiles("CCO")
mol2 = Chem.MolFromSmiles("CCC")
writer = Chem.SDWriter("multi_molecule.sdf")
writer.write(mol1)
writer.write(mol2)
writer.close()
# ethane_template.sdf
m = SmallMoleculeComponent(Chem.MolFromSmiles("CC"), name="ethane")
with open("ethane_template.sdf", mode="w") as tmpl:
tmpl.write(m.to_sdf())
# ethane_with_H_template.sdf
m2 = SmallMoleculeComponent(Chem.AddHs(m.to_rdkit()))
with open("ethane_with_H_template.sdf", mode="w") as tmpl:
tmpl.write(m2.to_sdf())
# network_template.graphml
mol1 = SmallMoleculeComponent(Chem.MolFromSmiles("CCO"))
mol2 = SmallMoleculeComponent(Chem.MolFromSmiles("CC"))
mol3 = SmallMoleculeComponent(Chem.MolFromSmiles("CO"))
edge12 = LigandAtomMapping(mol1, mol2, {0: 0, 1: 1})
edge23 = LigandAtomMapping(mol2, mol3, {0: 0})
edge13 = LigandAtomMapping(mol1, mol3, {0: 0, 2: 1})
network = Network([edge12, edge23, edge13])
with open("network_template.graphml", "w") as fn:
fn.write(network.to_graphml())
avg_line_length: 28.653846 | max_line_length: 76 | alphanum_fraction: 0.757718

hexsha: 0d7758610466286a05198a4d6f05b7488a107e50 | size: 519 | ext: py | lang: Python
repo_path: boxplot_metrics.py | repo_name: IamAbhilashPathak/CNERG | repo_head_hexsha: b9e3e52ad72773f79c53a2c6134767cc4a66f542 | licenses: ["MIT"] (identical across the stars/issues/forks fields)
max_stars_count: null | max_issues_count: null | max_forks_count: 1 (2018-08-25T06:42:41.000Z .. 2018-08-25T06:42:41.000Z)
import matplotlib.pyplot as plt
http = []
quic = []
x = ["http","quic"]
y = [http,quic]
count = 0
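# QOE_metrics_new alternates HTTP and QUIC rows; the fourth whitespace-separated
# field is the startup delay in seconds.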
with open("QOE_metrics_new", "r") as data:
for line in data:
p = line.split()
if (count == 0):
http.append(float(p[3]))
else:
quic.append(float(p[3]))
count = (count + 1) % 2
plt.boxplot(y, labels=x)
plt.ylabel('Startup Delay (in sec)')
plt.savefig('Startup Delay Boxplot.png')
plt.show()
avg_line_length: 16.741935 | max_line_length: 42 | alphanum_fraction: 0.589595

hexsha: 7994154e02da34272f40540230dc8fee4e068919 | size: 1828 | ext: py | lang: Python
repo_path: tests/test_miner_monitor.py | repo_name: ployt0/server_monitor | repo_head_hexsha: 835e48ed317b4b069ebd66675ca2d1b3120770c0 | licenses: ["MIT"] (identical across the stars/issues/forks fields)
max_stars_count: null | max_issues_count: null | max_forks_count: null
import datetime
from unittest.mock import patch, sentinel, Mock
from generalised_functions import ResultHolder, ErrorHandler, DAY_TIME_FMT
from miner_monitor import CheckResult, interrog_routine, display_opt_int_list
@patch("miner_monitor.format_ipv4", return_value="yarp!")
def test_CheckResult(mock_format_ipv4):
res = CheckResult(sentinel.time, sentinel.ipv4, sentinel.ping, sentinel.ping_max,
sentinel.mem_avail, sentinel.disk_avail, sentinel.last_boot, sentinel.ports,
sentinel.peers)
assert len(res.get_header().split(",")) == 11
mock_format_ipv4.assert_not_called()
assert len(res.to_csv().split(",")) == 11
mock_format_ipv4.assert_called_once_with(sentinel.ipv4)
assert res.get_unit_name() == "miner"
@patch("miner_monitor.CheckResult", autospec=True)
@patch("miner_monitor.MinerInterrogator.do_queries", autospec=True)
def test_interrog_routine(mock_queries, mock_check_result):
result_holder = Mock(ResultHolder, time=datetime.datetime.utcnow())
err_handler = Mock(ErrorHandler)
sample_latencies = ['16.0', '15.0', '18.0', '16.0']
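    # Recompute the rounded mean and max latency strings that the routine is
    # expected to pass through to CheckResult.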
ave_latency = str(int(round(sum(map(float, sample_latencies)) / len(sample_latencies))))
max_latency = str(int(round(max(map(float, sample_latencies)))))
interrog_routine(err_handler, {}, result_holder, sentinel.ipv4, sample_latencies)
mock_check_result.assert_called_once_with(
result_holder.time.strftime(DAY_TIME_FMT), sentinel.ipv4, ave_latency, max_latency,
None, None, None, None, None, None, None)
mock_queries.assert_called_once()
def test_display_opt_int_list():
assert display_opt_int_list([None] * 4) is None
assert display_opt_int_list(["42"] * 4) == "42_42_42_42"
assert display_opt_int_list(["42", None, "42", None]) == "42_None_42_None"
avg_line_length: 46.871795 | max_line_length: 98 | alphanum_fraction: 0.739606

hexsha: a186cf9d5ec94d40c4a4adede25bda48ff6e06e2 | size: 69814 | ext: py | lang: Python
repo_path: mpf/tests/test_SegmentDisplay.py | repo_name: Scottacus64/mpf | repo_head_hexsha: fcfb6c5698b9c7d8bf0eb64b021aaa389ea6478a | licenses: ["MIT"] (identical across the stars/issues/forks fields)
max_stars_count: null | max_issues_count: null | max_forks_count: null
from unittest.mock import patch, call, ANY, Mock
from mpf.core.rgb_color import RGBColor
from mpf.devices.segment_display.text_stack_entry import TextStackEntry
from mpf.devices.segment_display.transitions import NoTransition, PushTransition, CoverTransition, UncoverTransition, \
WipeTransition, TransitionRunner, SplitTransition
from mpf.devices.segment_display.segment_display_text import SegmentDisplayText
from mpf.platforms.interfaces.segment_display_platform_interface import FlashingType, \
SegmentDisplaySoftwareFlashPlatformInterface
from mpf.tests.MpfFakeGameTestCase import MpfFakeGameTestCase
from mpf.tests.MpfTestCase import test_config
class TestSegmentDisplay(MpfFakeGameTestCase):
def get_config_file(self):
return 'config.yaml'
def get_machine_path(self):
return 'tests/machine_files/segment_display/'
@test_config("game.yaml")
def test_game(self):
"""Test segment displays in a game for the documentation."""
display1 = self.machine.segment_displays["display1"]
display2 = self.machine.segment_displays["display2"]
display3 = self.machine.segment_displays["display3"]
display4 = self.machine.segment_displays["display4"]
display5 = self.machine.segment_displays["display5"]
self.assertEqual("", display1.hw_display.text)
self.assertEqual("", display2.hw_display.text)
self.assertEqual("", display3.hw_display.text)
self.assertEqual("", display4.hw_display.text)
self.assertEqual("", display5.hw_display.text)
self.start_game()
self.assertEqual("0", display1.hw_display.text)
self.assertEqual(FlashingType.FLASH_ALL, display1.hw_display.flashing)
self.assertEqual("", display2.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display2.hw_display.flashing)
self.assertEqual("", display3.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display3.hw_display.flashing)
self.assertEqual("", display4.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display4.hw_display.flashing)
self.assertEqual("1", display5.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display5.hw_display.flashing)
self.add_player()
self.assertEqual("0", display1.hw_display.text)
self.assertEqual(FlashingType.FLASH_ALL, display1.hw_display.flashing)
self.assertEqual("0", display2.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display2.hw_display.flashing)
self.assertEqual("", display3.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display3.hw_display.flashing)
self.assertEqual("", display4.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display4.hw_display.flashing)
self.assertEqual("1", display5.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display5.hw_display.flashing)
self.machine.game.player.score += 100
self.advance_time_and_run()
self.assertEqual("100", display1.hw_display.text)
self.drain_all_balls()
self.assertEqual("100", display1.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display1.hw_display.flashing)
self.assertEqual("0", display2.hw_display.text)
self.assertEqual(FlashingType.FLASH_ALL, display2.hw_display.flashing)
self.assertEqual("", display3.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display3.hw_display.flashing)
self.assertEqual("", display4.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display4.hw_display.flashing)
self.assertEqual("1", display5.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display5.hw_display.flashing)
self.machine.game.player.score += 23
self.advance_time_and_run()
self.assertEqual("100", display1.hw_display.text)
self.assertEqual("23", display2.hw_display.text)
self.drain_all_balls()
self.assertEqual("100", display1.hw_display.text)
self.assertEqual(FlashingType.FLASH_ALL, display1.hw_display.flashing)
self.assertEqual("23", display2.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display2.hw_display.flashing)
self.assertEqual("", display3.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display3.hw_display.flashing)
self.assertEqual("", display4.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display4.hw_display.flashing)
self.assertEqual("2", display5.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display5.hw_display.flashing)
self.drain_all_balls()
self.assertEqual("100", display1.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display1.hw_display.flashing)
self.assertEqual("23", display2.hw_display.text)
self.assertEqual(FlashingType.FLASH_ALL, display2.hw_display.flashing)
self.assertEqual("", display3.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display3.hw_display.flashing)
self.assertEqual("", display4.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display4.hw_display.flashing)
self.assertEqual("2", display5.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display5.hw_display.flashing)
self.drain_all_balls()
self.assertEqual("100", display1.hw_display.text)
self.assertEqual(FlashingType.FLASH_ALL, display1.hw_display.flashing)
self.assertEqual("23", display2.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display2.hw_display.flashing)
self.assertEqual("", display3.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display3.hw_display.flashing)
self.assertEqual("", display4.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display4.hw_display.flashing)
self.assertEqual("3", display5.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display5.hw_display.flashing)
self.drain_all_balls()
self.assertEqual("100", display1.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display1.hw_display.flashing)
self.assertEqual("23", display2.hw_display.text)
self.assertEqual(FlashingType.FLASH_ALL, display2.hw_display.flashing)
self.assertEqual("", display3.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display3.hw_display.flashing)
self.assertEqual("", display4.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display4.hw_display.flashing)
self.assertEqual("3", display5.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display5.hw_display.flashing)
# game ended
self.drain_all_balls()
self.assertGameIsNotRunning()
self.assertEqual("100", display1.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display1.hw_display.flashing)
self.assertEqual("23", display2.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display2.hw_display.flashing)
self.assertEqual("", display3.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display3.hw_display.flashing)
self.assertEqual("", display4.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display4.hw_display.flashing)
self.assertEqual("", display5.hw_display.text)
self.assertEqual(FlashingType.NO_FLASH, display5.hw_display.flashing)
def test_player(self):
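        """Test per-player and machine-variable text on the displays."""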
display1 = self.machine.segment_displays["display1"]
display2 = self.machine.segment_displays["display2"]
self.post_event("test_event1")
self.advance_time_and_run()
self.assertEqual("HELLO1", display1.hw_display.text)
self.assertEqual("HELLO2", display2.hw_display.text)
self.post_event("test_event2")
self.advance_time_and_run()
self.assertEqual("", display1.hw_display.text)
self.assertEqual("HELLO2", display2.hw_display.text)
self.post_event("test_flashing")
self.assertEqual(FlashingType.FLASH_ALL, display1.hw_display.flashing)
self.post_event("test_no_flashing")
self.assertEqual(FlashingType.NO_FLASH, display1.hw_display.flashing)
self.post_event("test_event3")
self.advance_time_and_run()
self.assertEqual("", display1.hw_display.text)
self.assertEqual("", display2.hw_display.text)
self.post_event("test_score")
self.advance_time_and_run()
self.assertEqual("1: 0", display1.hw_display.text)
self.assertEqual("2: 0", display2.hw_display.text)
self.machine.variables.set_machine_var("test", 42)
self.advance_time_and_run()
self.assertEqual("1: 0", display1.hw_display.text)
self.assertEqual("2: 42", display2.hw_display.text)
self.start_game()
self.machine.game.player.score += 100
self.advance_time_and_run()
self.assertEqual("1: 100", display1.hw_display.text)
self.assertEqual("2: 42", display2.hw_display.text)
self.machine.game.player.score += 23
self.machine.variables.set_machine_var("test", 1337)
self.advance_time_and_run()
self.assertEqual("1: 123", display1.hw_display.text)
self.assertEqual("2: 1337", display2.hw_display.text)
self.post_event("test_flash")
self.advance_time_and_run(.1)
self.assertEqual("TEST", display1.hw_display.text)
self.assertEqual("2: 1337", display2.hw_display.text)
self.advance_time_and_run(2)
self.assertEqual("1: 123", display1.hw_display.text)
self.assertEqual("2: 1337", display2.hw_display.text)
self.machine.modes["mode1"].start()
self.advance_time_and_run(.1)
self.assertEqual("MODE1", display1.hw_display.text)
self.assertEqual("MODE1", display2.hw_display.text)
self.machine.modes["mode1"].stop()
self.advance_time_and_run(7)
self.assertEqual("1: 123", display1.hw_display.text)
self.assertEqual("2: 1337", display2.hw_display.text)
self.machine.modes["mode1"].start()
self.advance_time_and_run(5)
self.assertEqual("MODE1", display1.hw_display.text)
self.assertEqual("MODE1", display2.hw_display.text)
self.advance_time_and_run(5)
self.assertEqual("MODE1", display1.hw_display.text)
self.assertEqual("2: 1337", display2.hw_display.text)
def test_scoring(self):
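        """Test scoring display as players are added."""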
display1 = self.machine.segment_displays["display1"]
display2 = self.machine.segment_displays["display2"]
# default scoring
self.post_event("test_score_two_player")
# one player game
self.start_game()
# first display shows score. second empty
self.assertEqual("0", display1.hw_display.text)
self.assertEqual("0", display2.hw_display.text)
# player scores
self.machine.game.player.score += 42
self.advance_time_and_run(.01)
self.assertEqual("42", display1.hw_display.text)
self.assertEqual("0", display2.hw_display.text)
# add player
self.add_player()
self.advance_time_and_run(.01)
self.assertEqual("42", display1.hw_display.text)
self.assertEqual("0", display2.hw_display.text)
@patch("mpf.platforms.interfaces.segment_display_platform_interface.SegmentDisplaySoftwareFlashPlatformInterface.__abstractmethods__", set())
@patch("mpf.platforms.interfaces.segment_display_platform_interface.SegmentDisplaySoftwareFlashPlatformInterface._set_text")
def test_software_flash_platform_interface(self, mock_set_text):
display = SegmentDisplaySoftwareFlashPlatformInterface("1")
display.set_text("12345 ABCDE", FlashingType.NO_FLASH)
display.set_software_flash(False)
self.assertTrue(mock_set_text.called)
mock_set_text.assert_has_calls([call("12345 ABCDE")])
display.set_software_flash(True)
mock_set_text.reset_mock()
display.set_text("12345 ABCDE", FlashingType.FLASH_ALL)
display.set_software_flash(False)
self.assertTrue(mock_set_text.called)
mock_set_text.assert_has_calls([call("12345 ABCDE"), call("")])
display.set_software_flash(True)
mock_set_text.reset_mock()
display.set_text("12345 ABCDE", FlashingType.FLASH_MATCH)
display.set_software_flash(False)
self.assertTrue(mock_set_text.called)
mock_set_text.assert_has_calls([call("12345 ABCDE"), call("12345 ABC ")])
display.set_software_flash(True)
mock_set_text.reset_mock()
display.set_text("12345 ABCDE", FlashingType.FLASH_MASK, "FFFFF______")
display.set_software_flash(False)
self.assertTrue(mock_set_text.called)
mock_set_text.assert_has_calls([call("12345 ABCDE"), call(" ABCDE")])
display.set_software_flash(True)
mock_set_text.reset_mock()
def test_segment_display_text(self):
"""Test the SegmentDisplayText class."""
# text equal to display length
test_text = SegmentDisplayText("test", 4, False, False)
self.assertTrue(isinstance(test_text, list))
self.assertEqual(4, len(test_text))
self.assertEqual("test", SegmentDisplayText.convert_to_str(test_text))
# text longer than display
test_text = SegmentDisplayText("testing", 4, False, False)
self.assertTrue(isinstance(test_text, list))
self.assertEqual(4, len(test_text))
self.assertEqual("ting", SegmentDisplayText.convert_to_str(test_text))
# text shorter than display
test_text = SegmentDisplayText("test", 7, False, False)
self.assertTrue(isinstance(test_text, list))
self.assertEqual(7, len(test_text))
self.assertEqual(" test", SegmentDisplayText.convert_to_str(test_text))
# collapse commas
test_text = SegmentDisplayText("25,000", 7, False, True)
self.assertTrue(isinstance(test_text, list))
self.assertEqual(7, len(test_text))
self.assertTrue(test_text[3].comma)
self.assertEqual(ord("5"), test_text[3].char_code)
self.assertFalse(test_text[4].comma)
self.assertEqual(ord("0"), test_text[4].char_code)
self.assertEqual(" 25,000", SegmentDisplayText.convert_to_str(test_text))
# do not collapse commas
test_text = SegmentDisplayText("25,000", 7, False, False)
self.assertTrue(isinstance(test_text, list))
self.assertEqual(7, len(test_text))
self.assertFalse(test_text[2].comma)
self.assertEqual(ord("5"), test_text[2].char_code)
self.assertFalse(test_text[3].comma)
self.assertEqual(ord(","), test_text[3].char_code)
self.assertEqual(" 25,000", SegmentDisplayText.convert_to_str(test_text))
# collapse dots
test_text = SegmentDisplayText("25.000", 7, True, False)
self.assertTrue(isinstance(test_text, list))
self.assertEqual(7, len(test_text))
self.assertTrue(test_text[3].dot)
self.assertEqual(ord("5"), test_text[3].char_code)
self.assertFalse(test_text[4].dot)
self.assertEqual(ord("0"), test_text[4].char_code)
self.assertEqual(" 25.000", SegmentDisplayText.convert_to_str(test_text))
# do not collapse dots
test_text = SegmentDisplayText("25.000", 7, False, False)
self.assertTrue(isinstance(test_text, list))
self.assertEqual(7, len(test_text))
self.assertFalse(test_text[2].dot)
self.assertEqual(ord("5"), test_text[2].char_code)
self.assertFalse(test_text[3].dot)
self.assertEqual(ord("."), test_text[3].char_code)
self.assertEqual(" 25.000", SegmentDisplayText.convert_to_str(test_text))
# no colors
test_text = SegmentDisplayText("COLOR", 5, False, False)
self.assertTrue(isinstance(test_text, list))
self.assertEqual(5, len(test_text))
colors = SegmentDisplayText.get_colors(test_text)
self.assertIsNone(colors)
# single color
test_text = SegmentDisplayText("COLOR", 5, False, False, [RGBColor("ffffff")])
self.assertTrue(isinstance(test_text, list))
self.assertEqual(5, len(test_text))
colors = SegmentDisplayText.get_colors(test_text)
self.assertEqual(5, len(colors))
self.assertEqual(5, colors.count(RGBColor("ffffff")))
# multiple colors
test_text = SegmentDisplayText("COLOR", 5, False, False,
[RGBColor("white"), RGBColor("red"), RGBColor("green"), RGBColor("blue"),
RGBColor("cyan")])
self.assertTrue(isinstance(test_text, list))
self.assertEqual(5, len(test_text))
colors = SegmentDisplayText.get_colors(test_text)
self.assertEqual(5, len(colors))
self.assertEqual([RGBColor("white"), RGBColor("red"), RGBColor("green"),
RGBColor("blue"), RGBColor("cyan")], colors)
# multiple colors (fewer colors than letters)
test_text = SegmentDisplayText("COLOR", 5, False, False,
[RGBColor("white"), RGBColor("red")])
self.assertTrue(isinstance(test_text, list))
self.assertEqual(5, len(test_text))
colors = SegmentDisplayText.get_colors(test_text)
self.assertEqual(5, len(colors))
self.assertEqual([RGBColor("white"), RGBColor("red"), RGBColor("red"),
RGBColor("red"), RGBColor("red")], colors)
# multiple colors (fewer colors than letters and fewer letters than characters)
test_text = SegmentDisplayText("COLOR", 8, False, False,
[RGBColor("white"), RGBColor("red")])
self.assertTrue(isinstance(test_text, list))
self.assertEqual(8, len(test_text))
colors = SegmentDisplayText.get_colors(test_text)
self.assertEqual(8, len(colors))
self.assertEqual([RGBColor("white"), RGBColor("white"), RGBColor("white"), RGBColor("white"),
RGBColor("red"), RGBColor("red"), RGBColor("red"), RGBColor("red")], colors)
def test_transitions(self):
"""Test segment display text transitions."""
self._test_no_transition()
self._test_push_transition()
self._test_cover_transition()
self._test_uncover_transition()
self._test_wipe_transition()
self._test_split_transition()
def _test_no_transition(self):
"""Test no transition."""
# no transition (with colors)
transition = NoTransition(5, False, False, {'direction': 'right'})
self.assertEqual(1, transition.get_step_count())
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual([RGBColor("green"), RGBColor("green"), RGBColor("green"),
RGBColor("green"), RGBColor("green")],
SegmentDisplayText.get_colors(transition.get_transition_step(0, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
with self.assertRaises(AssertionError):
transition.get_transition_step(1, "12345", "ABCDE")
def _test_push_transition(self):
"""Test push transition."""
# push right (with colors)
transition = PushTransition(5, False, False, {'direction': 'right'})
self.assertEqual(5, transition.get_step_count())
self.assertEqual("E1234",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual([RGBColor("green"), RGBColor("red"), RGBColor("red"),
RGBColor("red"), RGBColor("red")],
SegmentDisplayText.get_colors(transition.get_transition_step(0, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
self.assertEqual("DE123",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual([RGBColor("green"), RGBColor("green"), RGBColor("red"),
RGBColor("red"), RGBColor("red")],
SegmentDisplayText.get_colors(transition.get_transition_step(1, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
self.assertEqual("CDE12",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
self.assertEqual([RGBColor("green"), RGBColor("green"), RGBColor("green"),
RGBColor("red"), RGBColor("red")],
SegmentDisplayText.get_colors(transition.get_transition_step(2, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
self.assertEqual("BCDE1",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "12345", "ABCDE")))
self.assertEqual([RGBColor("green"), RGBColor("green"), RGBColor("green"),
RGBColor("green"), RGBColor("red")],
SegmentDisplayText.get_colors(transition.get_transition_step(3, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "12345", "ABCDE")))
self.assertEqual([RGBColor("green"), RGBColor("green"), RGBColor("green"),
RGBColor("green"), RGBColor("green")],
SegmentDisplayText.get_colors(transition.get_transition_step(4, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
# push left
transition = PushTransition(5, False, False, {'direction': 'left'})
self.assertEqual(5, transition.get_step_count())
self.assertEqual("2345A",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual("345AB",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual("45ABC",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
self.assertEqual("5ABCD",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "12345", "ABCDE")))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "12345", "ABCDE")))
# push right (display larger than text)
transition = PushTransition(8, False, False, {'direction': 'right'})
self.assertEqual(8, transition.get_step_count())
self.assertEqual("E 1234",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual("DE 123",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual("CDE 12",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
self.assertEqual("BCDE 1",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "12345", "ABCDE")))
self.assertEqual("ABCDE ",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "12345", "ABCDE")))
self.assertEqual(" ABCDE ",
SegmentDisplayText.convert_to_str(transition.get_transition_step(5, "12345", "ABCDE")))
self.assertEqual(" ABCDE ",
SegmentDisplayText.convert_to_str(transition.get_transition_step(6, "12345", "ABCDE")))
self.assertEqual(" ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(7, "12345", "ABCDE")))
# push left (display larger than text)
transition = PushTransition(8, False, False, {'direction': 'left'})
self.assertEqual(8, transition.get_step_count())
self.assertEqual(" 12345 ",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual(" 12345 ",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual("12345 ",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
self.assertEqual("2345 A",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "12345", "ABCDE")))
self.assertEqual("345 AB",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "12345", "ABCDE")))
self.assertEqual("45 ABC",
SegmentDisplayText.convert_to_str(transition.get_transition_step(5, "12345", "ABCDE")))
self.assertEqual("5 ABCD",
SegmentDisplayText.convert_to_str(transition.get_transition_step(6, "12345", "ABCDE")))
self.assertEqual(" ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(7, "12345", "ABCDE")))
# push right (collapse commas)
transition = PushTransition(5, False, True, {'direction': 'right'})
self.assertEqual(5, transition.get_step_count())
self.assertEqual("0 1,00",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "1,000", "25,000")))
self.assertEqual("00 1,0",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "1,000", "25,000")))
self.assertEqual("000 1,",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "1,000", "25,000")))
self.assertEqual("5,000 ",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "1,000", "25,000")))
self.assertEqual("25,000",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "1,000", "25,000")))
# push left (collapse commas)
transition = PushTransition(5, False, True, {'direction': 'left'})
self.assertEqual(5, transition.get_step_count())
self.assertEqual("1,0002",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "1,000", "25,000")))
self.assertEqual("00025,",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "1,000", "25,000")))
self.assertEqual("0025,0",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "1,000", "25,000")))
self.assertEqual("025,00",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "1,000", "25,000")))
self.assertEqual("25,000",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "1,000", "25,000")))
# push right (with text and colors)
transition = PushTransition(5, False, False,
{'direction': 'right', 'text': '-->', 'text_color': [RGBColor("yellow")]})
self.assertEqual(8, transition.get_step_count())
self.assertEqual(">1234",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual([RGBColor("yellow"), RGBColor("red"), RGBColor("red"),
RGBColor("red"), RGBColor("red")],
SegmentDisplayText.get_colors(transition.get_transition_step(0, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
self.assertEqual("->123",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual([RGBColor("yellow"), RGBColor("yellow"), RGBColor("red"),
RGBColor("red"), RGBColor("red")],
SegmentDisplayText.get_colors(transition.get_transition_step(1, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
self.assertEqual("-->12",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
self.assertEqual([RGBColor("yellow"), RGBColor("yellow"), RGBColor("yellow"),
RGBColor("red"), RGBColor("red")],
SegmentDisplayText.get_colors(transition.get_transition_step(2, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
self.assertEqual("E-->1",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "12345", "ABCDE")))
self.assertEqual([RGBColor("green"), RGBColor("yellow"), RGBColor("yellow"),
RGBColor("yellow"), RGBColor("red")],
SegmentDisplayText.get_colors(transition.get_transition_step(3, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
self.assertEqual("DE-->",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "12345", "ABCDE")))
self.assertEqual([RGBColor("green"), RGBColor("green"), RGBColor("yellow"),
RGBColor("yellow"), RGBColor("yellow")],
SegmentDisplayText.get_colors(transition.get_transition_step(4, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
self.assertEqual("CDE--",
SegmentDisplayText.convert_to_str(transition.get_transition_step(5, "12345", "ABCDE")))
self.assertEqual([RGBColor("green"), RGBColor("green"), RGBColor("green"),
RGBColor("yellow"), RGBColor("yellow")],
SegmentDisplayText.get_colors(transition.get_transition_step(5, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
self.assertEqual("BCDE-",
SegmentDisplayText.convert_to_str(transition.get_transition_step(6, "12345", "ABCDE")))
self.assertEqual([RGBColor("green"), RGBColor("green"), RGBColor("green"),
RGBColor("green"), RGBColor("yellow")],
SegmentDisplayText.get_colors(transition.get_transition_step(6, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(7, "12345", "ABCDE")))
self.assertEqual([RGBColor("green"), RGBColor("green"), RGBColor("green"),
RGBColor("green"), RGBColor("green")],
SegmentDisplayText.get_colors(transition.get_transition_step(7, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
# push right (with text that has color = None and colors)
transition = PushTransition(5, False, False,
{'direction': 'right', 'text': '-->', 'text_color': None})
self.assertEqual(8, transition.get_step_count())
self.assertEqual(">1234",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual([RGBColor("green"), RGBColor("red"), RGBColor("red"),
RGBColor("red"), RGBColor("red")],
SegmentDisplayText.get_colors(transition.get_transition_step(0, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
self.assertEqual("->123",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual([RGBColor("green"), RGBColor("green"), RGBColor("red"),
RGBColor("red"), RGBColor("red")],
SegmentDisplayText.get_colors(transition.get_transition_step(1, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
self.assertEqual("-->12",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
self.assertEqual([RGBColor("green"), RGBColor("green"), RGBColor("green"),
RGBColor("red"), RGBColor("red")],
SegmentDisplayText.get_colors(transition.get_transition_step(2, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
self.assertEqual("E-->1",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "12345", "ABCDE")))
self.assertEqual([RGBColor("green"), RGBColor("green"), RGBColor("green"),
RGBColor("green"), RGBColor("red")],
SegmentDisplayText.get_colors(transition.get_transition_step(3, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
self.assertEqual("DE-->",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "12345", "ABCDE")))
self.assertEqual([RGBColor("green"), RGBColor("green"), RGBColor("green"),
RGBColor("green"), RGBColor("green")],
SegmentDisplayText.get_colors(transition.get_transition_step(4, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
self.assertEqual("CDE--",
SegmentDisplayText.convert_to_str(transition.get_transition_step(5, "12345", "ABCDE")))
self.assertEqual([RGBColor("green"), RGBColor("green"), RGBColor("green"),
RGBColor("green"), RGBColor("green")],
SegmentDisplayText.get_colors(transition.get_transition_step(5, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
self.assertEqual("BCDE-",
SegmentDisplayText.convert_to_str(transition.get_transition_step(6, "12345", "ABCDE")))
self.assertEqual([RGBColor("green"), RGBColor("green"), RGBColor("green"),
RGBColor("green"), RGBColor("green")],
SegmentDisplayText.get_colors(transition.get_transition_step(6, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(7, "12345", "ABCDE")))
self.assertEqual([RGBColor("green"), RGBColor("green"), RGBColor("green"),
RGBColor("green"), RGBColor("green")],
SegmentDisplayText.get_colors(transition.get_transition_step(7, "12345", "ABCDE",
[RGBColor("red")],
[RGBColor("green")])))
# push left (with text)
transition = PushTransition(5, False, False, {'direction': 'left', 'text': "<--"})
self.assertEqual(8, transition.get_step_count())
self.assertEqual("2345<",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual("345<-",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual("45<--",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
self.assertEqual("5<--A",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "12345", "ABCDE")))
self.assertEqual("<--AB",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "12345", "ABCDE")))
self.assertEqual("--ABC",
SegmentDisplayText.convert_to_str(transition.get_transition_step(5, "12345", "ABCDE")))
self.assertEqual("-ABCD",
SegmentDisplayText.convert_to_str(transition.get_transition_step(6, "12345", "ABCDE")))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(7, "12345", "ABCDE")))
def _test_cover_transition(self):
"""Test cover transition."""
# cover right
transition = CoverTransition(5, False, False, {'direction': 'right'})
self.assertEqual(5, transition.get_step_count())
self.assertEqual("E2345",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual("DE345",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual("CDE45",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
self.assertEqual("BCDE5",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "12345", "ABCDE")))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "12345", "ABCDE")))
# cover right (with text)
transition = CoverTransition(5, False, False, {'direction': 'right', 'text': '-->'})
self.assertEqual(8, transition.get_step_count())
self.assertEqual(">2345",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual("->345",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual("-->45",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
self.assertEqual("E-->5",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "12345", "ABCDE")))
self.assertEqual("DE-->",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "12345", "ABCDE")))
self.assertEqual("CDE--",
SegmentDisplayText.convert_to_str(transition.get_transition_step(5, "12345", "ABCDE")))
self.assertEqual("BCDE-",
SegmentDisplayText.convert_to_str(transition.get_transition_step(6, "12345", "ABCDE")))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(7, "12345", "ABCDE")))
# cover left
transition = CoverTransition(5, False, False, {'direction': 'left'})
self.assertEqual(5, transition.get_step_count())
self.assertEqual("1234A",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual("123AB",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual("12ABC",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
self.assertEqual("1ABCD",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "12345", "ABCDE")))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "12345", "ABCDE")))
# cover left (with text)
transition = CoverTransition(5, False, False, {'direction': 'left', 'text': '<--'})
self.assertEqual(8, transition.get_step_count())
self.assertEqual("1234<",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual("123<-",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual("12<--",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
self.assertEqual("1<--A",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "12345", "ABCDE")))
self.assertEqual("<--AB",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "12345", "ABCDE")))
self.assertEqual("--ABC",
SegmentDisplayText.convert_to_str(transition.get_transition_step(5, "12345", "ABCDE")))
self.assertEqual("-ABCD",
SegmentDisplayText.convert_to_str(transition.get_transition_step(6, "12345", "ABCDE")))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(7, "12345", "ABCDE")))
def _test_uncover_transition(self):
"""Test uncover transition."""
# uncover right
transition = UncoverTransition(5, False, False, {'direction': 'right'})
self.assertEqual(5, transition.get_step_count())
self.assertEqual("A1234",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual("AB123",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual("ABC12",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
self.assertEqual("ABCD1",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "12345", "ABCDE")))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "12345", "ABCDE")))
# uncover right (with text)
transition = UncoverTransition(5, False, False, {'direction': 'right', 'text': '-->'})
self.assertEqual(8, transition.get_step_count())
self.assertEqual(">1234",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual("->123",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual("-->12",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
self.assertEqual("A-->1",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "12345", "ABCDE")))
self.assertEqual("AB-->",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "12345", "ABCDE")))
self.assertEqual("ABC--",
SegmentDisplayText.convert_to_str(transition.get_transition_step(5, "12345", "ABCDE")))
self.assertEqual("ABCD-",
SegmentDisplayText.convert_to_str(transition.get_transition_step(6, "12345", "ABCDE")))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(7, "12345", "ABCDE")))
# uncover left
transition = UncoverTransition(5, False, False, {'direction': 'left'})
self.assertEqual(5, transition.get_step_count())
self.assertEqual("2345E",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual("345DE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual("45CDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
self.assertEqual("5BCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "12345", "ABCDE")))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "12345", "ABCDE")))
# uncover left (with text)
transition = UncoverTransition(5, False, False, {'direction': 'left', 'text': '<--'})
self.assertEqual(8, transition.get_step_count())
self.assertEqual("2345<",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual("345<-",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual("45<--",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
self.assertEqual("5<--E",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "12345", "ABCDE")))
self.assertEqual("<--DE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "12345", "ABCDE")))
self.assertEqual("--CDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(5, "12345", "ABCDE")))
self.assertEqual("-BCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(6, "12345", "ABCDE")))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(7, "12345", "ABCDE")))
def _test_wipe_transition(self):
"""Test wipe transition."""
# wipe right
transition = WipeTransition(5, False, False, {'direction': 'right'})
self.assertEqual(5, transition.get_step_count())
self.assertEqual("A2345",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual("AB345",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual("ABC45",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
self.assertEqual("ABCD5",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "12345", "ABCDE")))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "12345", "ABCDE")))
# wipe right (with text)
transition = WipeTransition(5, False, False, {'direction': 'right', 'text': '-->'})
self.assertEqual(8, transition.get_step_count())
self.assertEqual(">2345",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual("->345",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual("-->45",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
self.assertEqual("A-->5",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "12345", "ABCDE")))
self.assertEqual("AB-->",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "12345", "ABCDE")))
self.assertEqual("ABC--",
SegmentDisplayText.convert_to_str(transition.get_transition_step(5, "12345", "ABCDE")))
self.assertEqual("ABCD-",
SegmentDisplayText.convert_to_str(transition.get_transition_step(6, "12345", "ABCDE")))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(7, "12345", "ABCDE")))
# wipe left
transition = WipeTransition(5, False, False, {'direction': 'left'})
self.assertEqual(5, transition.get_step_count())
self.assertEqual("1234E",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual("123DE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual("12CDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
self.assertEqual("1BCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "12345", "ABCDE")))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "12345", "ABCDE")))
# wipe left (with text)
transition = WipeTransition(5, False, False, {'direction': 'left', 'text': '<--'})
self.assertEqual(8, transition.get_step_count())
self.assertEqual("1234<",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual("123<-",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual("12<--",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
self.assertEqual("1<--E",
SegmentDisplayText.convert_to_str(transition.get_transition_step(3, "12345", "ABCDE")))
self.assertEqual("<--DE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(4, "12345", "ABCDE")))
self.assertEqual("--CDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(5, "12345", "ABCDE")))
self.assertEqual("-BCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(6, "12345", "ABCDE")))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(7, "12345", "ABCDE")))
def _test_split_transition(self):
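        """Test split transition."""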
# split push out (odd display length)
transition = SplitTransition(5, False, False, {'direction': 'out', 'mode': 'push'})
self.assertEqual(3, transition.get_step_count())
self.assertEqual("23C45",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual("3BCD4",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
# split push out (even display length)
transition = SplitTransition(6, False, False, {'direction': 'out', 'mode': 'push'})
self.assertEqual(3, transition.get_step_count())
self.assertEqual("23CD45",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "123456", "ABCDEF")))
self.assertEqual("3BCDE4",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "123456", "ABCDEF")))
self.assertEqual("ABCDEF",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "123456", "ABCDEF")))
# split push in (odd display length)
transition = SplitTransition(5, False, False, {'direction': 'in', 'mode': 'push'})
self.assertEqual(3, transition.get_step_count())
self.assertEqual("C234D",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual("BC3DE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
# split push in (even display length)
transition = SplitTransition(6, False, False, {'direction': 'in', 'mode': 'push'})
self.assertEqual(3, transition.get_step_count())
self.assertEqual("C2345D",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "123456", "ABCDEF")))
self.assertEqual("BC34DE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "123456", "ABCDEF")))
self.assertEqual("ABCDEF",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "123456", "ABCDEF")))
# split wipe out (odd output length)
transition = SplitTransition(5, False, False, {'direction': 'out', 'mode': 'wipe'})
self.assertEqual(3, transition.get_step_count())
self.assertEqual("12C45",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual("1BCD5",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
# split wipe out (even output length)
transition = SplitTransition(6, False, False, {'direction': 'out', 'mode': 'wipe'})
self.assertEqual(3, transition.get_step_count())
self.assertEqual("12CD56",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "123456", "ABCDEF")))
self.assertEqual("1BCDE6",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "123456", "ABCDEF")))
self.assertEqual("ABCDEF",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "123456", "ABCDEF")))
# split wipe in (odd output length)
transition = SplitTransition(5, False, False, {'direction': 'in', 'mode': 'wipe'})
self.assertEqual(3, transition.get_step_count())
self.assertEqual("A234E",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "12345", "ABCDE")))
self.assertEqual("AB3DE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "12345", "ABCDE")))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "12345", "ABCDE")))
# split wipe in (even output length)
transition = SplitTransition(6, False, False, {'direction': 'in', 'mode': 'wipe'})
self.assertEqual(3, transition.get_step_count())
self.assertEqual("A2345F",
SegmentDisplayText.convert_to_str(transition.get_transition_step(0, "123456", "ABCDEF")))
self.assertEqual("AB34EF",
SegmentDisplayText.convert_to_str(transition.get_transition_step(1, "123456", "ABCDEF")))
self.assertEqual("ABCDEF",
SegmentDisplayText.convert_to_str(transition.get_transition_step(2, "123456", "ABCDEF")))
def test_transition_runner(self):
"""Test the transition runner using an iterator."""
transition_iterator = iter(TransitionRunner(self.machine,
PushTransition(5, False, False, {'direction': 'right'}),
"12345", "ABCDE"))
self.assertEqual("E1234",
SegmentDisplayText.convert_to_str(next(transition_iterator)))
self.assertEqual("DE123",
SegmentDisplayText.convert_to_str(next(transition_iterator)))
self.assertEqual("CDE12",
SegmentDisplayText.convert_to_str(next(transition_iterator)))
self.assertEqual("BCDE1",
SegmentDisplayText.convert_to_str(next(transition_iterator)))
self.assertEqual("ABCDE",
SegmentDisplayText.convert_to_str(next(transition_iterator)))
with self.assertRaises(StopIteration):
next(transition_iterator)
@patch("mpf.platforms.virtual.VirtualSegmentDisplay.set_color")
@patch("mpf.platforms.virtual.VirtualSegmentDisplay.set_text")
def test_transitions_with_player(self, mock_set_text, mock_set_color):
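        """Test transitions and colors driven by events."""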
self.post_event("test_set_color_to_white")
self.advance_time_and_run(1)
self.assertTrue(mock_set_color.called)
self.assertEqual(1, mock_set_color.call_count)
mock_set_color.assert_has_calls([call([(255, 255, 255)])])
mock_set_color.reset_mock()
self.post_event("test_transition")
self.advance_time_and_run(3)
self.assertTrue(mock_set_text.called)
self.assertEqual(21, mock_set_text.call_count)
red = RGBColor("red")
wht = RGBColor("white")
mock_set_text.assert_has_calls([call(' ', colors=[red, wht, wht, wht, wht, wht, wht, wht, wht, wht], flash_mask='', flashing=FlashingType.NO_FLASH),
call(' ', colors=[red, red, wht, wht, wht, wht, wht, wht, wht, wht], flash_mask='', flashing=FlashingType.NO_FLASH),
call('L ', colors=[red, red, red, wht, wht, wht, wht, wht, wht, wht], flash_mask='', flashing=FlashingType.NO_FLASH),
call('LL ', colors=[red, red, red, red, wht, wht, wht, wht, wht, wht], flash_mask='', flashing=FlashingType.NO_FLASH),
call('OLL ', colors=[red, red, red, red, red, wht, wht, wht, wht, wht], flash_mask='', flashing=FlashingType.NO_FLASH),
call('ROLL ', colors=[red, red, red, red, red, red, wht, wht, wht, wht], flash_mask='', flashing=FlashingType.NO_FLASH),
call('CROLL ', colors=[red, red, red, red, red, red, red, wht, wht, wht], flash_mask='', flashing=FlashingType.NO_FLASH),
call('SCROLL ', colors=[red, red, red, red, red, red, red, red, wht, wht], flash_mask='', flashing=FlashingType.NO_FLASH),
call(' SCROLL ', colors=[red, red, red, red, red, red, red, red, red, wht], flash_mask='', flashing=FlashingType.NO_FLASH),
call(' SCROLL ', colors=[red, red, red, red, red, red, red, red, red, red], flash_mask='', flashing=FlashingType.NO_FLASH),
call(' SCROLL ', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call(' SCROLL ', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call('SCROLL ', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call('CROLL ', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call('ROLL ', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call('OLL ', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call('LL ', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call('L ', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call(' ', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call(' ', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call(' ', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH)])
mock_set_text.reset_mock()
self.post_event("test_transition_2")
self.advance_time_and_run(1)
self.assertTrue(mock_set_text.called)
self.assertEqual(6, mock_set_text.call_count)
mock_set_text.assert_has_calls([call(' 45 ', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call(' 3456 ', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call(' 234567 ', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call(' 12345678 ', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call('0123456789', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call('0123456789', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH)])
mock_set_text.reset_mock()
self.post_event("test_transition_3")
self.advance_time_and_run(1)
self.assertTrue(mock_set_text.called)
self.assertEqual(11, mock_set_text.call_count)
mock_set_text.assert_has_calls([call('A012345678', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call('AB01234567', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call('ABC0123456', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call('ABCD012345', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call('ABCDE01234', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call('ABCDEF0123', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call('ABCDEFG012', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call('ABCDEFGH01', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call('ABCDEFGHI0', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call('ABCDEFGHIJ', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH),
call('ABCDEFGHIJ', colors=None, flash_mask='', flashing=FlashingType.NO_FLASH)])
mock_set_text.reset_mock()
def test_text_stack(self):
"""Test the segment display text stack functionality."""
display1 = self.machine.segment_displays["display1"]
display1.add_text("FIRST")
self.assertEqual("FIRST", display1.text)
self.assertEqual([RGBColor("white")], display1.colors)
self.assertEqual(FlashingType.NO_FLASH, display1.flashing)
# higher priority and with colors, flashing
display1.add_text_entry(
TextStackEntry("SECOND", [RGBColor("red")], FlashingType.FLASH_ALL, "", None, None, 10, "2nd"))
self.assertEqual("SECOND", display1.text)
self.assertEqual([RGBColor("red")], display1.colors)
self.assertEqual(FlashingType.FLASH_ALL, display1.flashing)
# lower priority
display1.add_text_entry(
TextStackEntry("THIRD", [RGBColor("yellow")], FlashingType.FLASH_MASK, "F F F ", None, None, 5, "3rd"))
self.assertEqual("SECOND", display1.text)
self.assertEqual([RGBColor("red")], display1.colors)
self.assertEqual(FlashingType.FLASH_ALL, display1.flashing)
# remove highest priority item from stack
display1.remove_text_by_key("2nd")
self.assertEqual("THIRD", display1.text)
self.assertEqual([RGBColor("yellow")], display1.colors)
self.assertEqual(FlashingType.FLASH_MASK, display1.flashing)
self.assertEqual("F F F ", display1.flash_mask)
# replace current top text
display1.add_text("3rd", 5, "3rd")
self.assertEqual("3rd", display1.text)
self.assertEqual([RGBColor("yellow")], display1.colors)
self.assertEqual(FlashingType.FLASH_MASK, display1.flashing)
self.assertEqual("F F F ", display1.flash_mask)
# change text of lowest item
display1.add_text("1st")
self.assertEqual("3rd", display1.text)
self.assertEqual([RGBColor("yellow")], display1.colors)
self.assertEqual(FlashingType.FLASH_MASK, display1.flashing)
self.assertEqual("F F F ", display1.flash_mask)
# change text, color, and flashing of lowest item and raise its priority
display1.add_text_entry(
TextStackEntry("FIRST", [RGBColor("blue")], FlashingType.NO_FLASH, "", None, None, 20))
self.assertEqual("FIRST", display1.text)
self.assertEqual([RGBColor("blue")], display1.colors)
# remove "FIRST" entry
display1.remove_text_by_key()
self.assertEqual("3rd", display1.text)
self.assertEqual([RGBColor("blue")], display1.colors)
self.assertEqual(FlashingType.NO_FLASH, display1.flashing)
# set flashing
display1.set_flashing(FlashingType.FLASH_MASK, "FFF ")
self.assertEqual([RGBColor("blue")], display1.colors)
self.assertEqual(FlashingType.FLASH_MASK, display1.flashing)
self.assertEqual("FFF ", display1.flash_mask)
# set color
display1.set_color([RGBColor("cyan")])
self.assertEqual([RGBColor("cyan")], display1.colors)
self.assertEqual(FlashingType.FLASH_MASK, display1.flashing)
self.assertEqual("FFF ", display1.flash_mask)
# remove last remaining entry
display1.remove_text_by_key("3rd")
self.assertEqual("", display1.text)
self.assertEqual([RGBColor("cyan")], display1.colors)
self.assertEqual(FlashingType.NO_FLASH, display1.flashing)
self.assertEqual("", display1.flash_mask)
| 61.782301
| 165
| 0.59564
|
db9cd2a0e00d445922681d3adf118e58888365c5
| 32,159
|
py
|
Python
|
fatf/utils/distances.py
|
perellonieto/fat-forensics
|
0fd975ec743c5f44fc29bb2a499a2c1067bdbeff
|
[
"BSD-3-Clause"
] | null | null | null |
fatf/utils/distances.py
|
perellonieto/fat-forensics
|
0fd975ec743c5f44fc29bb2a499a2c1067bdbeff
|
[
"BSD-3-Clause"
] | null | null | null |
fatf/utils/distances.py
|
perellonieto/fat-forensics
|
0fd975ec743c5f44fc29bb2a499a2c1067bdbeff
|
[
"BSD-3-Clause"
] | null | null | null |
"""
The :mod:`fatf.utils.distances` module holds a variety of distance metrics.
The distance metrics and tools implemented in this module are mainly used for
the :class:`fatf.utils.models.models.KNN` model implementation, to measure
distance (and similarity) of data points for various functions in this package
as well as for documentation examples and testing.
"""
# Author: Kacper Sokol <k.sokol@bristol.ac.uk>
# License: new BSD
import inspect
import logging
import warnings
from typing import Callable, Union
import numpy as np
import fatf.utils.array.tools as fuat
import fatf.utils.array.validation as fuav
import fatf.utils.validation as fuv
from fatf.exceptions import IncorrectShapeError
__all__ = ['get_distance_matrix',
'get_point_distance',
'euclidean_distance',
'euclidean_point_distance',
'euclidean_array_distance',
'hamming_distance_base',
'hamming_distance',
'hamming_point_distance',
'hamming_array_distance',
'binary_distance',
'binary_point_distance',
'binary_array_distance',
'check_distance_functionality'] # yapf: disable
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
def _validate_get_distance(
data_array: np.ndarray,
distance_function: Callable[[np.ndarray, np.ndarray], float]) -> bool:
"""
Validates ``data_array`` and ``distance_function`` parameters.
Parameters
----------
data_array : numpy.ndarray
A 2-dimensional numpy array.
distance_function : Callable[[numpy.ndarray, numpy.ndarray], number]
A Python function that takes as an input two 1-dimensional numpy arrays
of equal length and outputs a number representing a distance between
them.
Raises
------
AttributeError
The distance function does not require exactly two parameters.
IncorrectShapeError
The data array is not a 2-dimensional numpy array.
TypeError
The data array is not of a base type (numbers and/or strings). The
distance function is not a Python callable (function).
Returns
-------
is_valid : boolean
``True`` if the parameters are valid, ``False`` otherwise.
"""
is_valid = False
if not fuav.is_2d_array(data_array):
raise IncorrectShapeError('The data_array has to be a 2-dimensional '
'(structured or unstructured) numpy array.')
if not fuav.is_base_array(data_array):
raise TypeError('The data_array has to be of a base type (strings '
'and/or numbers).')
if callable(distance_function):
required_param_n = 0
params = inspect.signature(distance_function).parameters
for param in params:
if params[param].default is params[param].empty:
required_param_n += 1
if required_param_n != 2:
raise AttributeError('The distance function must require exactly '
'2 parameters. Given function requires {} '
'parameters.'.format(required_param_n))
else:
raise TypeError('The distance function should be a Python callable '
'(function).')
is_valid = True
return is_valid
def get_distance_matrix(
data_array: np.ndarray,
distance_function: Callable[[np.ndarray, np.ndarray], float]
) -> np.ndarray:
"""
Computes a distance matrix (2-D) between all rows of the ``data_array``.
Parameters
----------
data_array : numpy.ndarray
A 2-dimensional numpy array for which row-to-row distances will be
computed.
distance_function : Callable[[numpy.ndarray, numpy.ndarray], number]
A Python function that takes as an input two 1-dimensional numpy arrays
of equal length and outputs a number representing a distance between
them. **The distance function is assumed to return the same distance
regardless of the order in which parameters are given.**
Raises
------
AttributeError
The distance function does not require exactly two parameters.
IncorrectShapeError
The data array is not a 2-dimensional numpy array.
TypeError
The data array is not of a base type (numbers and/or strings). The
distance function is not a Python callable (function).
Returns
-------
distances : numpy.ndarray
A square numerical numpy array with distances between all pairs of data
points (rows) in the ``data_array``.
"""
assert _validate_get_distance(data_array,
distance_function), 'Invalid input.'
if fuav.is_structured_array(data_array):
distances = np.zeros((data_array.shape[0], data_array.shape[0]),
dtype=np.float64)
for row_i in range(data_array.shape[0]):
for row_j in range(row_i, data_array.shape[0]):
dist = distance_function(data_array[row_i], data_array[row_j])
distances[row_i, row_j] = dist
distances[row_j, row_i] = dist
else:
        def ddf(one_d, two_d):
            # Distance from the single row `one_d` to every row of `two_d`.
            return np.apply_along_axis(distance_function, 1, two_d, one_d)
distances = np.apply_along_axis(ddf, 1, data_array, data_array)
return distances
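# --- Editor's usage sketch (not part of the original module; kept in comments
# so import-time behaviour is unchanged). Pairing get_distance_matrix with
# euclidean_distance (defined below) on a small numerical array yields a
# symmetric matrix with a zero diagonal:
#
#     points = np.array([[0.0, 0.0], [3.0, 4.0]])
#     get_distance_matrix(points, euclidean_distance)
#     # array([[0., 5.],
#     #        [5., 0.]])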
def get_point_distance(
data_array: np.ndarray, data_point: Union[np.ndarray, np.void],
distance_function: Callable[[np.ndarray, np.ndarray], float]
) -> np.ndarray:
"""
Computes the distance between a data point and an array of data.
This function computes the distances between the ``data_point`` and all
rows of the ``data_array``.
Parameters
----------
data_array : numpy.ndarray
        A 2-dimensional numpy array to whose rows distances will be computed.
data_point : Union[numpy.ndarray, numpy.void]
A 1-dimensional numpy array or numpy void (for structured data points)
for which distances to every row of the ``data_array`` will be
computed.
distance_function : Callable[[numpy.ndarray, numpy.ndarray], number]
A Python function that takes as an input two 1-dimensional numpy arrays
of equal length and outputs a number representing a distance between
them. **The distance function is assumed to return the same distance
regardless of the order in which parameters are given.**
Raises
------
AttributeError
The distance function does not require exactly two parameters.
IncorrectShapeError
The data array is not a 2-dimensional numpy array. The data point is
not 1-dimensional. The number of columns in the data array is different
to the number of elements in the data point.
TypeError
The data array or the data point is not of a base type (numbers and/or
strings). The data point and the data array have incomparable dtypes.
The distance function is not a Python callable (function).
Returns
-------
distances : numpy.ndarray
A 1-dimensional numerical numpy array with distances between
``data_point`` and every row of the ``data_array``.
"""
assert _validate_get_distance(data_array,
distance_function), 'Invalid input.'
is_structured = fuav.is_structured_array(data_array)
if not fuav.is_1d_like(data_point):
        raise IncorrectShapeError('The data point has to be a 1-dimensional '
                                  'numpy array or a numpy void (for '
                                  'structured arrays).')
data_point_array = np.asarray([data_point])
if not fuav.is_base_array(data_point_array):
raise TypeError('The data point has to be of a base type (strings '
'and/or numbers).')
if not fuav.are_similar_dtype_arrays(data_array, data_point_array):
raise TypeError('The dtypes of the data set and the data point are '
'too different.')
# Testing only for unstructured as the dtype comparison picks up on a
# different number of columns in a structured array
if not is_structured:
if data_array.shape[1] != data_point_array.shape[1]:
            raise IncorrectShapeError('The data point has a different number '
                                      'of columns (features) than the data '
                                      'set.')
if is_structured:
distances = np.zeros((data_array.shape[0], ), dtype=np.float64)
for row_i in range(data_array.shape[0]):
distances[row_i] = distance_function(data_array[row_i], data_point)
else:
distances = np.apply_along_axis(distance_function, 1, data_array,
data_point)
return distances
def euclidean_distance(x: Union[np.ndarray, np.void],
y: Union[np.ndarray, np.void]) -> float:
"""
Calculates the Euclidean distance between two 1-dimensional numpy "arrays".
Each of the input arrays can be either a 1D numpy array or a row of a
structured numpy array, i.e. numpy's void.
Parameters
----------
x : Union[numpy.ndarray, numpy.void]
The first numpy array (has to be 1-dimensional and purely numerical).
y : Union[numpy.ndarray, numpy.void]
The second numpy array (has to be 1-dimensional and purely numerical).
Raises
------
IncorrectShapeError
Either of the input arrays is not 1-dimensional or they are not of the
same length.
ValueError
Either of the input arrays is not purely numerical.
Returns
-------
distance : float
Euclidean distance between the two numpy arrays.
"""
# pylint: disable=invalid-name
if not fuav.is_1d_like(x):
raise IncorrectShapeError('The x array should be 1-dimensional.')
if not fuav.is_1d_like(y):
raise IncorrectShapeError('The y array should be 1-dimensional.')
# Transform the arrays to unstructured
x_array = fuat.as_unstructured(x).reshape(-1)
y_array = fuat.as_unstructured(y).reshape(-1)
if not fuav.is_numerical_array(x_array):
raise ValueError('The x array should be purely numerical.')
if not fuav.is_numerical_array(y_array):
raise ValueError('The y array should be purely numerical.')
if x_array.shape[0] != y_array.shape[0]:
        raise IncorrectShapeError('The x and y arrays should have the same '
                                  'length.')
distance = np.linalg.norm(x_array - y_array)
return distance
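# Editor's sanity check for euclidean_distance (comments only): the classic
# 3-4-5 right triangle.
#
#     euclidean_distance(np.array([0.0, 0.0]), np.array([3.0, 4.0]))
#     # 5.0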
def euclidean_point_distance(y: Union[np.ndarray, np.void],
X: np.ndarray) -> np.ndarray:
"""
Calculates the Euclidean distance between ``y`` and every row of ``X``.
``y`` has to be a 1-dimensional numerical numpy array or a row of a
structured numpy array (i.e. numpy's void) and ``X`` has to be a
2-dimensional numerical numpy array. The length of ``y`` has to be the same
as the width of ``X``.
Parameters
----------
y : Union[numpy.ndarray, numpy.void]
A numpy array (has to be 1-dimensional and purely numerical) used to
calculate distances from.
X : numpy.ndarray
        A numpy array (has to be 2-dimensional and purely numerical) to whose
        rows distances are calculated.
Raises
------
IncorrectShapeError
Either ``y`` is not 1-dimensional or ``X`` is not 2-dimensional or the
length of ``y`` is not equal to the number of columns in ``X``.
ValueError
Either of the input arrays is not purely numerical.
Returns
-------
distances : numpy.ndarray
An array of Euclidean distances between ``y`` and every row of ``X``.
"""
# pylint: disable=invalid-name
if not fuav.is_1d_like(y):
raise IncorrectShapeError('The y array should be 1-dimensional.')
if not fuav.is_2d_array(X):
raise IncorrectShapeError('The X array should be 2-dimensional.')
# Transform the arrays to unstructured
y_array = fuat.as_unstructured(y).reshape(-1)
X_array = fuat.as_unstructured(X) # pylint: disable=invalid-name
if not fuav.is_numerical_array(y_array):
raise ValueError('The y array should be purely numerical.')
if not fuav.is_numerical_array(X_array):
raise ValueError('The X array should be purely numerical.')
# Compare shapes
if y_array.shape[0] != X_array.shape[1]:
raise IncorrectShapeError('The number of columns in the X array '
                                  'should be the same as the number of '
                                  'elements in the y array.')
distances = np.apply_along_axis(euclidean_distance, 1, X_array, y_array)
return distances
def euclidean_array_distance(X: np.ndarray, Y: np.ndarray) -> np.ndarray:
"""
Calculates the Euclidean distance matrix between rows in ``X`` and ``Y``.
Both ``X`` and ``Y`` have to be 2-dimensional numerical numpy arrays of the
same width.
Parameters
----------
X : numpy.ndarray
A numpy array -- has to be 2-dimensional and purely numerical.
Y : numpy.ndarray
A numpy array -- has to be 2-dimensional and purely numerical.
Raises
------
IncorrectShapeError
Either ``X`` or ``Y`` is not 2-dimensional or ``X`` and ``Y`` do not
have the same number of columns.
ValueError
Either of the input arrays is not purely numerical.
Returns
-------
distance_matrix : numpy.ndarray
        A matrix of Euclidean distances between rows in ``X`` and ``Y``.
"""
# pylint: disable=invalid-name
if not fuav.is_2d_array(X):
raise IncorrectShapeError('The X array should be 2-dimensional.')
if not fuav.is_2d_array(Y):
raise IncorrectShapeError('The Y array should be 2-dimensional.')
if not fuav.is_numerical_array(X):
raise ValueError('The X array should be purely numerical.')
if not fuav.is_numerical_array(Y):
raise ValueError('The Y array should be purely numerical.')
# Transform the arrays to unstructured
Y_array = fuat.as_unstructured(Y) # pylint: disable=invalid-name
X_array = fuat.as_unstructured(X) # pylint: disable=invalid-name
# Compare shapes
if Y_array.shape[1] != X_array.shape[1]:
raise IncorrectShapeError('The number of columns in the X array '
                                  'should be the same as the number of '
                                  'columns in the Y array.')
distance_matrix = np.apply_along_axis(euclidean_point_distance, 1, X_array,
Y_array)
return distance_matrix
def hamming_distance_base(x: str,
y: str,
normalise: bool = False,
equal_length: bool = False) -> Union[int, float]:
"""
Calculates the Hamming distance between two strings ``x`` and ``y``.
If the strings are of a different length they are compared up to the
shorter one's length and the distance between them is increased by their
difference in length.
Parameters
----------
x : string
The first string to be compared.
y : string
The second string to be compared.
normalise : boolean, optional (default=False)
Normalises the distance to be bounded between 0 and 1.
equal_length : boolean, optional (default=False)
Forces the arrays to be of equal length -- raises exception if they are
not.
Raises
------
TypeError
Either ``x`` or ``y`` is not a string.
ValueError
``x`` and ``y`` are of different length when ``equal_length`` parameter
is set to ``True``.
Returns
-------
distance : Number
The Hamming distances between ``x`` and ``y``.
"""
# pylint: disable=invalid-name
if not isinstance(x, str):
raise TypeError('x should be a string.')
if not isinstance(y, str):
raise TypeError('y should be a string.')
x_len = len(x)
y_len = len(y)
distance = abs(x_len - y_len) # type: float
if distance and equal_length:
raise ValueError('Input strings differ in length and the equal_length '
'parameter forces them to be of equal length.')
elif distance:
min_index = min(x_len, y_len)
for i in range(min_index):
distance += 0 if x[i] == y[i] else 1
else:
assert x_len == y_len, 'The strings should be of equal length.'
for i in range(x_len):
distance += 0 if x[i] == y[i] else 1
if normalise:
logger.debug('Hamming distance is being normalised.')
distance /= max(x_len, y_len)
return distance
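# Editor's usage sketch for hamming_distance_base (comments only). Equal-length
# strings are compared position by position; unequal lengths additionally add
# the difference in length to the distance.
#
#     hamming_distance_base('karolin', 'kathrin')                  # 3
#     hamming_distance_base('karolin', 'kathrin', normalise=True)  # 3/7 ~ 0.43
#     hamming_distance_base('abc', 'abcd')                         # 1
#     hamming_distance_base('abc', 'abcd', equal_length=True)      # ValueError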
def hamming_distance(x: Union[np.ndarray, np.void],
y: Union[np.ndarray, np.void],
**kwargs: bool) -> Union[int, float]:
"""
Computes the Hamming distance between 1-dimensional non-numerical arrays.
Each of the input arrays can be either a 1D numpy array or a row of a
structured numpy array, i.e. numpy's void.
Parameters
----------
x : Union[numpy.ndarray, numpy.void]
The first numpy array (has to be 1-dimensional and non-numerical).
y : Union[numpy.ndarray, numpy.void]
The second numpy array (has to be 1-dimensional and non-numerical).
**kwargs : boolean
Keyword arguments that are passed to the
:func:`fatf.utils.distances.hamming_distance_base` function responsible
for calculating the Hamming distance.
Raises
------
IncorrectShapeError
Either of the input arrays is not 1-dimensional or they are of a
different length.
ValueError
Either of the input arrays is not purely textual.
Returns
-------
distance : Union[integer, float]
Hamming distance between the two numpy arrays.
"""
# pylint: disable=invalid-name
if not fuav.is_1d_like(x):
raise IncorrectShapeError('The x array should be 1-dimensional.')
if not fuav.is_1d_like(y):
raise IncorrectShapeError('The y array should be 1-dimensional.')
# Transform the arrays to unstructured
x_array = fuat.as_unstructured(x).reshape(-1)
y_array = fuat.as_unstructured(y).reshape(-1)
if not fuav.is_textual_array(x_array):
raise ValueError('The x array should be textual.')
if not fuav.is_textual_array(y_array):
raise ValueError('The y array should be textual.')
if x_array.shape[0] != y_array.shape[0]:
raise IncorrectShapeError('The x and y arrays should have the same '
'length.')
def kw_hamming_distance(vec):
return hamming_distance_base(vec[0], vec[1], **kwargs)
distance = np.apply_along_axis(kw_hamming_distance, 0,
np.vstack((x_array, y_array)))
distance = distance.sum()
return distance
def hamming_point_distance(y: Union[np.ndarray, np.void], X: np.ndarray,
**kwargs: bool) -> np.ndarray:
"""
Calculates the Hamming distance between ``y`` and every row of ``X``.
    ``y`` has to be a 1-dimensional textual numpy array or a row of a
    structured numpy array (i.e. numpy's void) and ``X`` has to be a
    2-dimensional textual numpy array. The length of ``y`` has to be the same
as the width of ``X``.
Parameters
----------
y : Union[numpy.ndarray, numpy.void]
A numpy array (has to be 1-dimensional and non-numerical) used to
calculate the distances from.
X : numpy.ndarray
        A numpy array (has to be 2-dimensional and non-numerical) to whose
        rows the distances are calculated.
**kwargs : boolean
Keyword arguments that are passed to the
:func:`fatf.utils.distances.hamming_distance_base` function responsible
for calculating the Hamming distance.
Raises
------
IncorrectShapeError
Either ``y`` is not 1-dimensional or ``X`` is not 2-dimensional or the
length of ``y`` is not equal to the number of columns in ``X``.
ValueError
Either of the input arrays is not purely textual.
Returns
-------
distances : numpy.ndarray
An array of Hamming distances between ``y`` and every row of ``X``.
"""
# pylint: disable=invalid-name
if not fuav.is_1d_like(y):
raise IncorrectShapeError('The y array should be 1-dimensional.')
if not fuav.is_2d_array(X):
raise IncorrectShapeError('The X array should be 2-dimensional.')
# Transform the arrays to unstructured
y_array = fuat.as_unstructured(y).reshape(-1)
X_array = fuat.as_unstructured(X) # pylint: disable=invalid-name
if not fuav.is_textual_array(y_array):
raise ValueError('The y array should be textual.')
if not fuav.is_textual_array(X_array):
raise ValueError('The X array should be textual.')
# Compare shapes
if y_array.shape[0] != X_array.shape[1]:
raise IncorrectShapeError('The number of columns in the X array '
                                  'should be the same as the number of '
                                  'elements in the y array.')
distances = np.apply_along_axis(hamming_distance, 1, X_array, y_array,
**kwargs)
return distances
def hamming_array_distance(X: np.ndarray, Y: np.ndarray,
**kwargs: bool) -> np.ndarray:
"""
Calculates the Hamming distance matrix between rows in ``X`` and ``Y``.
    Both ``X`` and ``Y`` have to be 2-dimensional textual numpy arrays of the
same width.
Parameters
----------
X : numpy.ndarray
A numpy array -- has to be 2-dimensional and non-numerical.
Y : numpy.ndarray
A numpy array -- has to be 2-dimensional and non-numerical.
**kwargs : boolean
Keyword arguments that are passed to the
:func:`fatf.utils.distances.hamming_distance_base` function responsible
for calculating the Hamming distance.
Raises
------
IncorrectShapeError
Either ``X`` or ``Y`` is not 2-dimensional or ``X`` and ``Y`` do not
have the same number of columns.
ValueError
Either of the input arrays is not purely textual.
Returns
-------
distance_matrix : numpy.ndarray
        A matrix of Hamming distances between rows in ``X`` and ``Y``.
"""
# pylint: disable=invalid-name
if not fuav.is_2d_array(X):
raise IncorrectShapeError('The X array should be 2-dimensional.')
if not fuav.is_2d_array(Y):
raise IncorrectShapeError('The Y array should be 2-dimensional.')
if not fuav.is_textual_array(X):
raise ValueError('The X array should be textual.')
if not fuav.is_textual_array(Y):
raise ValueError('The Y array should be textual.')
# Transform the arrays to unstructured
X_array = fuat.as_unstructured(X) # pylint: disable=invalid-name
Y_array = fuat.as_unstructured(Y) # pylint: disable=invalid-name
# Compare shapes
if X_array.shape[1] != Y_array.shape[1]:
raise IncorrectShapeError('The number of columns in the X array '
                                  'should be the same as the number of '
                                  'columns in the Y array.')
distance_matrix = np.apply_along_axis(hamming_point_distance, 1, X_array,
Y_array, **kwargs)
return distance_matrix
def binary_distance(x: Union[np.ndarray, np.void],
y: Union[np.ndarray, np.void],
normalise: bool = False) -> Union[int, float]:
"""
Computes the binary distance between two 1-dimensional arrays.
The distance is incremented by one for every position in the two input
arrays where the value does not match. Each of the input arrays can be
either a 1D numpy array or a row of a structured numpy array, i.e. numpy's
void.
    Both input arrays have to be of a base dtype. (See the
    :func:`fatf.utils.array.validation.is_base_array` function description for
    an explanation of a base dtype.)
Parameters
----------
x : Union[numpy.ndarray, numpy.void]
The first numpy array (has to be 1-dimensional).
y : Union[numpy.ndarray, numpy.void]
The second numpy array (has to be 1-dimensional).
normalise : boolean, optional (default=False)
Whether to normalise the binary distance using the input array length.
Raises
------
IncorrectShapeError
Either of the input arrays is not 1-dimensional or they are of a
different length.
Returns
-------
distance : Union[integer, float]
Binary distance between the two numpy arrays.
"""
# pylint: disable=invalid-name
if not fuav.is_1d_like(x):
raise IncorrectShapeError('The x array should be 1-dimensional.')
if not fuav.is_1d_like(y):
raise IncorrectShapeError('The y array should be 1-dimensional.')
# Transform the arrays to unstructured
x_array = fuat.as_unstructured(x).reshape(-1)
y_array = fuat.as_unstructured(y).reshape(-1)
if x_array.shape[0] != y_array.shape[0]:
raise IncorrectShapeError('The x and y arrays should have the same '
'length.')
distance = (x_array != y_array).sum()
if normalise:
logger.debug('Binary distance is being normalised.')
distance /= x_array.shape[0]
return distance
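# Editor's usage sketch for binary_distance (comments only): one mismatching
# position out of three, optionally normalised by the array length.
#
#     binary_distance(np.array([1, 2, 3]), np.array([1, 0, 3]))
#     # 1
#     binary_distance(np.array([1, 2, 3]), np.array([1, 0, 3]), normalise=True)
#     # 0.3333...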
def binary_point_distance(y: Union[np.ndarray, np.void], X: np.ndarray,
**kwargs: bool) -> np.ndarray:
"""
Calculates the binary distance between ``y`` and every row of ``X``.
``y`` has to be a 1-dimensional numpy array or a row of a structured numpy
array (i.e. numpy's void) and ``X`` has to be a 2-dimensional numpy array.
The length of ``y`` has to be the same as the width of ``X``.
    Both input arrays have to be of a base dtype. (See the
    :func:`fatf.utils.array.validation.is_base_array` function description for
    an explanation of a base dtype.)
Parameters
----------
y : Union[numpy.ndarray, numpy.void]
A numpy array (has to be 1-dimensional) used to calculate the distances
from.
X : numpy.ndarray
        A numpy array (has to be 2-dimensional) to whose rows the distances
        are calculated.
**kwargs : boolean
Keyword arguments that are passed to the
:func:`fatf.utils.distances.binary_distance` function responsible for
calculating the binary distance.
Raises
------
IncorrectShapeError
Either ``y`` is not 1-dimensional or ``X`` is not 2-dimensional or the
length of ``y`` is not equal to the number of columns in ``X``.
Returns
-------
distances : numpy.ndarray
An array of binary distances between ``y`` and every row of ``X``.
"""
# pylint: disable=invalid-name
if not fuav.is_1d_like(y):
raise IncorrectShapeError('The y array should be 1-dimensional.')
if not fuav.is_2d_array(X):
raise IncorrectShapeError('The X array should be 2-dimensional.')
# Transform the arrays to unstructured
y_array = fuat.as_unstructured(y).reshape(-1)
X_array = fuat.as_unstructured(X) # pylint: disable=invalid-name
# Compare shapes
if y_array.shape[0] != X_array.shape[1]:
raise IncorrectShapeError('The number of columns in the X array '
                                  'should be the same as the number of '
                                  'elements in the y array.')
distances = np.apply_along_axis(binary_distance, 1, X_array, y_array,
**kwargs)
return distances
def binary_array_distance(X: np.ndarray, Y: np.ndarray,
**kwargs: bool) -> np.ndarray:
"""
Calculates the binary distance matrix between rows in ``X`` and ``Y``.
Both ``X`` and ``Y`` have to be 2-dimensional numpy arrays of the same
width.
    Both input arrays have to be of a base dtype. (See the
    :func:`fatf.utils.array.validation.is_base_array` function description for
    an explanation of a base dtype.)
Parameters
----------
X : numpy.ndarray
A numpy array -- has to be 2-dimensional.
Y : numpy.ndarray
A numpy array -- has to be 2-dimensional.
**kwargs : boolean
Keyword arguments that are passed to the
:func:`fatf.utils.distances.binary_distance` function responsible for
calculating the binary distance.
Raises
------
IncorrectShapeError
Either ``X`` or ``Y`` is not 2-dimensional or ``X`` and ``Y`` do not
have the same number of columns.
Returns
-------
distance_matrix : numpy.ndarray
        A matrix of binary distances between rows in ``X`` and ``Y``.
"""
# pylint: disable=invalid-name
if not fuav.is_2d_array(X):
raise IncorrectShapeError('The X array should be 2-dimensional.')
if not fuav.is_2d_array(Y):
raise IncorrectShapeError('The Y array should be 2-dimensional.')
# Transform the arrays to unstructured
X_array = fuat.as_unstructured(X)
Y_array = fuat.as_unstructured(Y)
# Compare shapes
if X_array.shape[1] != Y_array.shape[1]:
raise IncorrectShapeError('The number of columns in the X array '
                                  'should be the same as the number of '
                                  'columns in the Y array.')
distance_matrix = np.apply_along_axis(binary_point_distance, 1, X_array,
Y_array, **kwargs)
return distance_matrix
def check_distance_functionality(distance_function: Callable[..., np.ndarray],
suppress_warning: bool = False) -> bool:
"""
Checks whether a distance function takes exactly 2 required parameters.
.. versionadded:: 0.0.2
The distance function to be checked should calculate a distance matrix
(2-dimensional numpy array) between all of the rows of the two
2-dimensional numpy arrays passed as input to the ``distance_function``.
Parameters
----------
distance_function : Callable[[numpy.ndarray, numpy.ndarray, ...], \
numpy.ndarray]
A function that calculates a distance matrix between all pairs of rows
of the two input arrays.
suppress_warning : boolean, optional (default=False)
A boolean parameter that indicates whether the function should suppress
its warning message. Defaults to False.
Warns
-----
UserWarning
Warns about the details of the required functionality that the distance
function lacks.
Raises
------
TypeError
The ``distance_function`` parameter is not a Python callable or the
``suppress_warning`` parameter is not a boolean.
Returns
-------
is_functional : boolean
A boolean variable that indicates whether the distance function is
valid.
"""
if not callable(distance_function):
raise TypeError('The distance_function parameter should be a Python '
'callable.')
if not isinstance(suppress_warning, bool):
raise TypeError('The suppress_warning parameter should be a boolean.')
required_param_n = fuv.get_required_parameters_number(distance_function)
is_functional = required_param_n == 2
if not is_functional and not suppress_warning:
message = ("The '{}' distance function has incorrect number "
'({}) of the required parameters. It needs to have '
'exactly 2 required parameters. Try using optional '
'parameters if you require more functionality.').format(
distance_function.__name__, required_param_n)
warnings.warn(message, category=UserWarning)
return is_functional
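# Editor's usage sketch for check_distance_functionality (comments only): a
# distance function with exactly two required parameters passes; anything else
# fails (and warns unless suppressed).
#
#     check_distance_functionality(euclidean_array_distance)
#     # True
#     check_distance_functionality(lambda X, Y, weight: X, suppress_warning=True)
#     # False -- 'weight' makes it three required parameters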
| 37.135104
| 79
| 0.636898
|
245a97a05f9379bbc3872dbea94d61612ac97b95
| 276
|
py
|
Python
|
settings.py
|
nickderobertis/ufrc
|
9fedd37afafb69dce12d7931c6f4968a844b7e9f
|
[
"MIT"
] | null | null | null |
settings.py
|
nickderobertis/ufrc
|
9fedd37afafb69dce12d7931c6f4968a844b7e9f
|
[
"MIT"
] | 11
|
2022-01-12T23:18:20.000Z
|
2022-03-31T03:33:14.000Z
|
settings.py
|
nickderobertis/ufrc
|
9fedd37afafb69dce12d7931c6f4968a844b7e9f
|
[
"MIT"
] | null | null | null |
from typing import Final
from pydantic import BaseSettings
class Settings(BaseSettings):
username: str
password: str
uid: int
group_name: str
class Config:
env_file = ".env"
env_prefix = "UFRC_"
SETTINGS: Final[Settings] = Settings()
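# Editor's usage sketch (comments only): with env_prefix "UFRC_" and the .env
# fallback above, pydantic populates the fields from the environment, e.g.
#
#     export UFRC_USERNAME=alice UFRC_PASSWORD=secret UFRC_UID=1000 \
#            UFRC_GROUP_NAME=research
#
# after which Settings() -- and therefore the module-level SETTINGS -- holds
# Settings(username='alice', password='secret', uid=1000, group_name='research').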
| 15.333333
| 38
| 0.663043
|
39ea0b27ca23925c7b0e613fafd05091698adc5e
| 783
|
py
|
Python
|
ingestion/src/metadata/generated/schema/type/collectionDescriptor.py
|
MGoode1/OpenMetadata
|
1a226d50e879f2d00d2eb9e474fecd1c7c1a7a31
|
[
"Apache-2.0"
] | null | null | null |
ingestion/src/metadata/generated/schema/type/collectionDescriptor.py
|
MGoode1/OpenMetadata
|
1a226d50e879f2d00d2eb9e474fecd1c7c1a7a31
|
[
"Apache-2.0"
] | null | null | null |
ingestion/src/metadata/generated/schema/type/collectionDescriptor.py
|
MGoode1/OpenMetadata
|
1a226d50e879f2d00d2eb9e474fecd1c7c1a7a31
|
[
"Apache-2.0"
] | null | null | null |
# generated by datamodel-codegen:
# filename: schema/type/collectionDescriptor.json
# timestamp: 2021-08-23T15:40:00+00:00
from __future__ import annotations
from typing import Optional
from pydantic import AnyUrl, BaseModel, Field
from . import profile
class CollectionInfo(BaseModel):
name: Optional[str] = Field(
None, description='Unique name that identifies a collection.'
)
documentation: Optional[str] = Field(None, description='Description of collection.')
href: Optional[AnyUrl] = Field(
None,
description='URL of the API endpoint where given collections are available.',
)
images: Optional[profile.ImageList] = None
class SchemaForCollectionDescriptor(BaseModel):
collection: Optional[CollectionInfo] = None
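# Editor's usage sketch (comments only; the field values are made up). The
# generated pydantic models validate on construction, so a malformed href
# would raise a ValidationError:
#
#     info = CollectionInfo(name='tables', href='https://example.com/api/v1/tables')
#     SchemaForCollectionDescriptor(collection=info).json(exclude_none=True)
#     # '{"collection": {"name": "tables", "href": "https://example.com/api/v1/tables"}}'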
| 27.964286
| 88
| 0.734355
|
d4f7441a424cb4e778af47d73debc67fe656000c
| 1,949
|
py
|
Python
|
src/Freshpaper.py
|
pratik2709/freshpaper
|
cd5e1c89405c832baeebb1692887525023d9e114
|
[
"BSD-3-Clause"
] | null | null | null |
src/Freshpaper.py
|
pratik2709/freshpaper
|
cd5e1c89405c832baeebb1692887525023d9e114
|
[
"BSD-3-Clause"
] | null | null | null |
src/Freshpaper.py
|
pratik2709/freshpaper
|
cd5e1c89405c832baeebb1692887525023d9e114
|
[
"BSD-3-Clause"
] | null | null | null |
import logging
import sys
import click
from Constants import BING_IMAGE_DESCRIPTION, NASA_IMAGE_DESCRIPTION, IMAGE_SOURCES
from src.DownloadUtils import DownloadUtils
from src.SystemUtils import SystemUtils
from src.WallpaperUtils import WallpaperUtils
try:
# for python3
from urllib.request import urlopen, urlretrieve, HTTPError, URLError
except ImportError:
# for python2
from urllib import urlretrieve
from urllib2 import urlopen, HTTPError, URLError
if sys.platform.startswith("win32"):
pass
logging.basicConfig(level=logging.INFO, format="%(message)s")
log = logging.getLogger(__name__)
@click.group(invoke_without_command=True)
@click.pass_context
@click.option(
"--source",
default="bing",
type=click.Choice(IMAGE_SOURCES),
help="Source for setting the wallpaper.",
)
def main(context, source):
if context.invoked_subcommand is None:
wallpaper = WallpaperUtils()
downloadUtils = DownloadUtils()
systemUtils = SystemUtils()
directory_name = systemUtils.get_wallpaper_directory() # Wallpaper directory name
freshpaperSources = construct_freshpaper_sources(downloadUtils.download_image_bing,
downloadUtils.download_image_nasa)
try:
download_image = freshpaperSources.get(source)["download"]
image_path = download_image(directory_name)
wallpaper.set_wallpaper(image_path)
except ConnectionError:
image_path = systemUtils.get_saved_wallpaper(directory_name)
wallpaper.set_wallpaper(image_path)
except Exception as e:
log.error(e)
def construct_freshpaper_sources(source1, source2):
return {
"bing": {"download": source1, "description": BING_IMAGE_DESCRIPTION},
"nasa": {"download": source2, "description": NASA_IMAGE_DESCRIPTION},
}
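# Editor's usage sketch (comments only; 'dl' stands for a DownloadUtils()
# instance, a name assumed for illustration). The returned mapping is what
# main() indexes with the --source option:
#
#     sources = construct_freshpaper_sources(dl.download_image_bing,
#                                            dl.download_image_nasa)
#     sources['bing']['download'](directory_name)  # download and save the image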
if __name__ == "__main__":
main()
| 30.453125
| 91
| 0.704464
|
02b87647ad196e01fb685a7e48e9f4dca9f7ddd4
| 388
|
py
|
Python
|
from minha_classe import pessoa.py
|
marcosmercurio/Ola-Mundo
|
d5452336a3d83c2d43a8f3c222aa93fcbce845a0
|
[
"MIT"
] | null | null | null |
from minha_classe import pessoa.py
|
marcosmercurio/Ola-Mundo
|
d5452336a3d83c2d43a8f3c222aa93fcbce845a0
|
[
"MIT"
] | null | null | null |
from minha_classe import pessoa.py
|
marcosmercurio/Ola-Mundo
|
d5452336a3d83c2d43a8f3c222aa93fcbce845a0
|
[
"MIT"
] | null | null | null |
from minha_classe import pessoa
nome = str(input("Enter a name: "))
idade = int(input("Enter the age: "))
altura = float(input("Enter the height in metres: "))
peso = float(input("Enter the weight in kg: "))
p1 = pessoa(nome, idade, altura, peso)
imc = peso / altura ** 2
print('- -' * 28)
print("Person's data:")
print('- -' * 28)
print(p1.__repr__(), "Your BMI is: %.2f" % imc)
print('- -' * 28)
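# Editor's note: the pessoa class imported above is not included in this file.
# A minimal sketch that would satisfy this script (attribute and parameter
# names are assumptions inferred from the call site) could look like:
#
#     class pessoa:
#         def __init__(self, nome, idade, altura, peso):
#             self.nome, self.idade = nome, idade
#             self.altura, self.peso = altura, peso
#
#         def __repr__(self):
#             return "Name: %s, Age: %d, Height: %.2fm, Weight: %.1fkg." % (
#                 self.nome, self.idade, self.altura, self.peso)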
| 24.25
| 46
| 0.613402
|
da52a0a2eb0fbbfab4a94fb3515f505d1238b947
| 1,934
|
py
|
Python
|
test_naucse_render/test_notebook.py
|
Kobzol/naucse_render
|
20509c2b9f20fb727116325a847df2e428a430b9
|
[
"MIT"
] | 3
|
2019-01-19T02:56:21.000Z
|
2019-01-21T12:41:54.000Z
|
test_naucse_render/test_notebook.py
|
Kobzol/naucse_render
|
20509c2b9f20fb727116325a847df2e428a430b9
|
[
"MIT"
] | 23
|
2019-01-29T14:18:32.000Z
|
2022-02-15T13:37:56.000Z
|
test_naucse_render/test_notebook.py
|
Kobzol/naucse_render
|
20509c2b9f20fb727116325a847df2e428a430b9
|
[
"MIT"
] | 5
|
2019-01-18T13:17:13.000Z
|
2021-12-01T13:47:10.000Z
|
from textwrap import dedent
from pathlib import Path
import click
import pygments
import pygments.formatters
import pygments.lexers
import pytest
from naucse_render.notebook import convert_notebook
FIXTURES = Path(__file__).parent / 'fixtures'
@pytest.fixture(scope='module')
def _notebook():
path = FIXTURES / 'notebook.ipynb'
with open(path) as f:
content = f.read()
return convert_notebook(content)
@pytest.fixture()
def notebook(_notebook):
click.echo(pygments.highlight(
_notebook,
lexer=pygments.lexers.get_lexer_by_name('html'),
formatter=pygments.formatters.get_formatter_by_name('console')
))
return _notebook
def test_notebook_markdown_cell_conversion(notebook):
markdown = dedent(r"""
<h2>Markdown</h2>
<p>This is <em>Markdown cell</em>!</p>
<p>It even has some $\LaTeX$:</p>
<p>$$ x = \sin(\pi) $$</p>
""").strip()
assert markdown in notebook
def test_notebook_has_input_prompt(notebook):
input_prompt = '<div class="prompt input_prompt">In [1]:</div>'
assert input_prompt in notebook
def test_notebook_has_output_prompt(notebook):
input_prompt = '<div class="prompt output_prompt">Out[1]:</div>'
assert input_prompt in notebook
def test_notebook_has_highlighted_input_area(notebook):
input_area = dedent("""
<div class=" highlight hl-ipython3">
<pre>
<span></span><span class="nb">print</span><span class="p">(</span>
<span class="s1">'foo'</span><span class="p">)</span>
<span class="mi">5</span> <span class="o">+</span>
<span class="mi">2</span>
</pre>
</div>
""").strip().replace('\n', '')
assert input_area in notebook.replace('\n', '')
@pytest.mark.parametrize('output', ('foo', 7))
def test_notebook_has_desired_outputs(notebook, output):
output_pre = '<pre>{}</pre>'.format(output)
assert output_pre in notebook.replace('\n', '')
| 27.239437
| 74
| 0.654085
|
4bfe6f6f8111c937be158db0a258777893493dda
| 3,123
|
py
|
Python
|
server/tests-py/test_webhook.py
|
devrsi0n/graphql-engine
|
5726852c5414d9a0bcb1b50b439a964438ce0ae4
|
[
"Apache-2.0",
"MIT"
] | 1
|
2019-10-31T19:50:02.000Z
|
2019-10-31T19:50:02.000Z
|
server/tests-py/test_webhook.py
|
devrsi0n/graphql-engine
|
5726852c5414d9a0bcb1b50b439a964438ce0ae4
|
[
"Apache-2.0",
"MIT"
] | 2
|
2021-11-15T17:04:34.000Z
|
2021-11-15T23:04:32.000Z
|
server/tests-py/test_webhook.py
|
devrsi0n/graphql-engine
|
5726852c5414d9a0bcb1b50b439a964438ce0ae4
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
from datetime import datetime, timedelta
import math
import json
import time
import base64
import ruamel.yaml as yaml
import pytest
from test_subscriptions import init_ws_conn
from context import PytestConf
if not PytestConf.config.getoption('--hge-webhook'):
pytest.skip('--hge-webhook is missing, skipping webhook expiration tests', allow_module_level=True)
usefixtures = pytest.mark.usefixtures
@pytest.fixture(scope='function')
def ws_conn_recreate(ws_client):
ws_client.recreate_conn()
def connect_with(hge_ctx, ws_client, headers):
headers['X-Hasura-Role'] = 'user'
headers['X-Hasura-User-Id'] = '1234321'
headers['X-Hasura-Auth-Mode'] = 'webhook'
token = base64.b64encode(json.dumps(headers).encode('utf-8')).decode('utf-8')
headers['Authorization'] = 'Bearer ' + token
payload = {'headers': headers}
init_ws_conn(hge_ctx, ws_client, payload)
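# Editor's note (comments only): the Bearer token built in connect_with() is
# simply the base64-encoded JSON of the webhook headers, e.g.
#
#     base64.b64encode(json.dumps({'X-Hasura-Role': 'user'}).encode('utf-8')).decode('utf-8')
#     # 'eyJYLUhhc3VyYS1Sb2xlIjogInVzZXIifQ=='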
EXPIRE_TIME_FORMAT = '%a, %d %b %Y %T GMT'
@usefixtures('ws_conn_recreate')
class TestWebhookSubscriptionExpiry(object):
def test_expiry_with_no_header(self, hge_ctx, ws_client):
        # no expiry time => the connection will remain alive
connect_with(hge_ctx, ws_client, {})
time.sleep(5)
assert ws_client.remote_closed == False, ws_client.remote_closed
def test_expiry_with_expires_header(self, hge_ctx, ws_client):
exp = datetime.utcnow() + timedelta(seconds=6)
connect_with(hge_ctx, ws_client, {
'Expires': exp.strftime(EXPIRE_TIME_FORMAT)
})
time.sleep(4)
assert ws_client.remote_closed == False, ws_client.remote_closed
time.sleep(4)
assert ws_client.remote_closed == True, ws_client.remote_closed
def test_expiry_with_cache_control(self, hge_ctx, ws_client):
connect_with(hge_ctx, ws_client, {
'Cache-Control': 'max-age=6'
})
time.sleep(4)
assert ws_client.remote_closed == False, ws_client.remote_closed
time.sleep(4)
assert ws_client.remote_closed == True, ws_client.remote_closed
def test_expiry_with_both(self, hge_ctx, ws_client):
exp = datetime.utcnow() + timedelta(seconds=6)
connect_with(hge_ctx, ws_client, {
'Expires': exp.strftime(EXPIRE_TIME_FORMAT),
'Cache-Control': 'max-age=10',
})
        # Cache-Control takes precedence over Expires, so the expiry time will be ten seconds
time.sleep(4)
assert ws_client.remote_closed == False, ws_client.remote_closed
time.sleep(4)
assert ws_client.remote_closed == False, ws_client.remote_closed
time.sleep(4)
assert ws_client.remote_closed == True, ws_client.remote_closed
def test_expiry_with_parse_error(self, hge_ctx, ws_client):
exp = datetime.utcnow() + timedelta(seconds=3)
connect_with(hge_ctx, ws_client, {
'Expires': exp.strftime('%a, %d %m %Y %T UTC'),
'Cache-Control': 'maxage=3',
})
# neither will parse, the connection will remain alive
time.sleep(5)
assert ws_client.remote_closed == False, ws_client.remote_closed
| 37.178571
| 103
| 0.682357
|
d14afd2621ffb4e6482e06c7a393c4c8c1d4ffd1
| 39,451
|
py
|
Python
|
smarthome.py
|
jpch/Domoticz-Google-Assistant
|
f020f87aa84d780a5b4421fe0bf207fa440b554a
|
[
"Apache-2.0"
] | null | null | null |
smarthome.py
|
jpch/Domoticz-Google-Assistant
|
f020f87aa84d780a5b4421fe0bf207fa440b554a
|
[
"Apache-2.0"
] | null | null | null |
smarthome.py
|
jpch/Domoticz-Google-Assistant
|
f020f87aa84d780a5b4421fe0bf207fa440b554a
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import hashlib
import os
import re
import subprocess
import sys
import threading
from collections.abc import Mapping
from itertools import product
import requests
import trait
from auth import *
from const import (DOMOTICZ_TO_GOOGLE_TYPES, ERR_FUNCTION_NOT_SUPPORTED, ERR_PROTOCOL_ERROR, ERR_DEVICE_OFFLINE,
TEMPLATE, ERR_UNKNOWN_ERROR, ERR_CHALLENGE_NEEDED, DOMOTICZ_GET_ALL_DEVICES_URL, domains,
DOMOTICZ_GET_SETTINGS_URL, DOMOTICZ_GET_ONE_DEVICE_URL, DOMOTICZ_GET_SCENES_URL, CONFIGFILE, LOGFILE,
REQUEST_SYNC_BASE_URL, REPORT_STATE_BASE_URL, ATTRS_BRIGHTNESS, ATTRS_FANSPEED,
                   ATTRS_THERMSTATSETPOINT, ATTRS_COLOR_TEMP, ATTRS_PERCENTAGE, ATTRS_VACCUM_MODES, VERSION,
                   DOMOTICZ_GET_VERSION)
from helpers import (configuration, readFile, saveFile, SmartHomeError, SmartHomeErrorNoChallenge, AogState, uptime,
getTunnelUrl, FILE_DIR, logger, ReportState, Auth, logfilepath)
DOMOTICZ_URL = configuration['Domoticz']['ip'] + ':' + configuration['Domoticz']['port']
CREDITS = (configuration['Domoticz']['username'], configuration['Domoticz']['password'])
try:
logger.info("Connecting to Domoticz on %s" % DOMOTICZ_URL)
r = requests.get(
        DOMOTICZ_URL + '/json.htm?type=command&param=addlogmessage&message=Connected to Google Assistant with DZGA v' + VERSION,
auth=CREDITS, timeout=(2, 5))
except Exception as e:
logger.error('Connection to Domoticz refused with error: %s' % e)
try:
import git
repo = git.Repo(FILE_DIR)
except:
repo = None
ReportState = ReportState()
if not ReportState.enable_report_state():
logger.error("Service account key is not found.")
logger.error("Report state will be unavailable")
def checkupdate():
if 'CheckForUpdates' in configuration and configuration['CheckForUpdates'] == True:
try:
r = requests.get(
'https://raw.githubusercontent.com/DewGew/Domoticz-Google-Assistant/' + repo.active_branch.name + '/const.py')
text = r.text
if VERSION not in text:
update = 1
logger.info("========")
logger.info(" New version is availible on Github!")
else:
update = 0
return update
except Exception as e:
logger.error('Connection to Github refused! Check configuration.')
return 0
else:
return 0
update = checkupdate()
# Convert a Domoticz device type into the matching Google Assistant domain
def AogGetDomain(device):
if device["Type"] in ['Light/Switch', 'Lighting 1', 'Lighting 2', 'Lighting 5', 'RFY', 'Value']:
if device["SwitchType"] in ['Blinds', 'Blinds Inverted', 'Venetian Blinds EU', 'Venetian Blinds US',
'Blinds Percentage', 'Blinds Percentage Inverted']:
return domains['blinds']
elif 'Door Lock' == device["SwitchType"]:
return domains['lock']
elif 'Door Lock Inverted' == device["SwitchType"]:
return domains['lockinv']
elif "Door Contact" == device["SwitchType"]:
return domains['door']
elif device["SwitchType"] in ['Push On Button', 'Push Off Button']:
return domains['push']
elif 'Motion Sensor' == device["SwitchType"]:
return domains['sensor']
elif 'Selector' == device["SwitchType"]:
if device['Image'] == 'Fan':
return domains['fan']
else:
return domains['selector']
elif 'Smoke Detector' == device["SwitchType"]:
return domains['smokedetektor']
elif 'Camera_Stream' in configuration and True == device["UsedByCamera"] and True == \
configuration['Camera_Stream']['Enabled']:
return domains['camera']
elif 'Image_Override' in configuration and 'Switch' in configuration['Image_Override'] and device["Image"] in \
configuration['Image_Override']['Switch']:
return domains['switch']
elif 'Image_Override' in configuration and 'Light' in configuration['Image_Override'] and device["Image"] in \
configuration['Image_Override']['Light']:
return domains['light']
elif 'Image_Override' in configuration and 'Media' in configuration['Image_Override'] and device["Image"] in \
configuration['Image_Override']['Media']:
return domains['media']
elif 'Image_Override' in configuration and 'Outlet' in configuration['Image_Override'] and device["Image"] in \
configuration['Image_Override']['Outlet']:
return domains['outlet']
elif 'Image_Override' in configuration and 'Speaker' in configuration['Image_Override'] and device["Image"] in \
configuration['Image_Override']['Speaker']:
return domains['speaker']
elif 'Image_Override' in configuration and 'Fan' in configuration['Image_Override'] and device["Image"] in \
configuration['Image_Override']['Fan']:
return domains['fan']
elif 'Image_Override' in configuration and 'Heating' in configuration['Image_Override'] and device["Image"] in \
configuration['Image_Override']['Heating']:
return domains['heater']
elif 'Image_Override' in configuration and 'Kettle' in configuration['Image_Override'] and device["Image"] in \
configuration['Image_Override']['Kettle']:
return domains['kettle']
else:
return domains['light']
elif 'Blinds' == device["Type"]:
return domains['blinds']
elif 'Group' == device["Type"]:
return domains['group']
elif 'Scene' == device["Type"]:
return domains['scene']
elif device["Type"] in ['Temp', 'Temp + Humidity', 'Temp + Humidity + Baro']:
return domains['temperature']
elif 'Thermostat' == device['Type']:
return domains['thermostat']
elif 'Color Switch' == device["Type"]:
if "Dimmer" == device["SwitchType"]:
return domains['color']
elif "On/Off" == device["SwitchType"]:
            logger.info('%s (Idx: %s) is a color switch. To get all functions, set this device as Dimmer in Domoticz',
                        device["Name"], device["idx"])
return domains['light']
elif device["SwitchType"] in ['Push On Button', 'Push Off Button']:
return domains['push']
elif 'Security' == device["Type"]:
return domains['security']
return None
def getDesc(state):
if state.domain == domains['scene'] or state.domain == domains['group']:
if 'Scene_Config' in configuration and configuration['Scene_Config'] is not None:
desc = configuration['Scene_Config'].get(int(state.id), None)
return desc
elif 'Device_Config' in configuration and configuration['Device_Config'] is not None:
desc = configuration['Device_Config'].get(int(state.id), None)
return desc
else:
return None
def getDeviceConfig(descstr):
ISLIST = ['nicknames']
rawconfig = re.findall(r'<voicecontrol>(.*?)</voicecontrol>', descstr, re.DOTALL)
if len(rawconfig) > 0:
try:
lines = rawconfig[0].strip().splitlines()
cfgdict = {}
for l in lines:
assign = l.split('=')
varname = assign[0].strip().lower()
if varname != "":
if varname in ISLIST:
allvalues = assign[1].split(',')
varvalues = []
for val in allvalues:
varvalues.append(val.strip())
cfgdict[varname] = varvalues
else:
varvalue = assign[1].strip()
if varvalue.lower() == "true":
varvalue = True
elif varvalue.lower() == "false":
varvalue = False
cfgdict[varname] = varvalue
except:
            logger.error('Error parsing device configuration from Domoticz device description: %s', rawconfig[0])
return None
return cfgdict
return None
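# Editor's usage sketch for getDeviceConfig (comments only; the description
# text is made up). A <voicecontrol> block inside a Domoticz device
# description is parsed into a plain dict, with 'nicknames' split into a list:
#
#     getDeviceConfig('<voicecontrol>\n'
#                     'nicknames = ceiling lamp, main light\n'
#                     'room = Kitchen\n'
#                     'hide = false\n'
#                     '</voicecontrol>')
#     # {'nicknames': ['ceiling lamp', 'main light'], 'room': 'Kitchen',
#     #  'hide': False}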
def getAog(device):
domain = AogGetDomain(device)
if domain is None:
return None
aog = AogState()
aog.name = device["Name"] # .encode('ascii', 'ignore')
aog.domain = domain
aog.id = device["idx"]
aog.entity_id = domain + aog.id
aog.state = device.get("Data", "Scene")
aog.level = device.get("LevelInt", 0)
aog.temp = device.get("Temp")
aog.humidity = device.get("Humidity")
aog.setpoint = device.get("SetPoint")
aog.color = device.get("Color")
aog.protected = device.get("Protected")
aog.maxdimlevel = device.get("MaxDimLevel")
aog.seccode = settings.get("SecPassword")
aog.secondelay = settings.get("SecOnDelay")
aog.tempunit = settings.get("TempUnit")
aog.battery = device.get("BatteryLevel")
aog.hardware = device.get("HardwareName")
aog.selectorLevelName = device.get("LevelNames")
aog.language = settings.get("Language")
aog.lastupdate = device.get("LastUpdate")
aog.dzvents = settings.get("dzVents")
# Try to get device specific voice control configuration from Domoticz
# Read it from the configuration file if not in Domoticz (for backward compatibility)
desc = getDeviceConfig(device.get("Description"))
if desc is None:
desc = getDesc(aog)
if desc is not None:
dt = desc.get('devicetype', None)
if dt is not None:
if aog.domain in [domains['light']]:
if dt.lower() in ['window', 'gate', 'garage', 'light', 'ac_unit', 'bathtub', 'coffemaker', 'dishwasher', 'dryer', 'fan', 'heater', 'kettle', 'media', 'microwave', 'outlet', 'oven', 'speaker', 'switch', 'vacuum', 'washer', 'waterheater']:
aog.domain = domains[dt.lower()]
if aog.domain in [domains['door']]:
if dt.lower() in ['window', 'gate', 'garage']:
aog.domain = domains[dt.lower()]
if aog.domain in [domains['selector']]:
if dt.lower() in ['vacuum']:
aog.domain = domains[dt.lower()]
n = desc.get('nicknames', None)
if n is not None:
aog.nicknames = n
r = desc.get('room', None)
if r is not None:
aog.room = r
ack = desc.get('ack', False)
if ack:
aog.ack = ack
report_state = desc.get('report_state', True)
if not ReportState.enable_report_state():
aog.report_state = False
if not report_state:
aog.report_state = report_state
if domains['thermostat'] == aog.domain:
at_idx = desc.get('actual_temp_idx', None)
if at_idx is not None:
aog.actual_temp_idx = at_idx
try:
aog.state = str(aogDevs[domains['temperature'] + at_idx].temp)
aogDevs[domains['temperature'] + at_idx].domain = domains['merged'] + aog.id + ')'
except:
                    logger.error('Merge error: cannot find temperature device with idx %s', at_idx)
                    logger.error('Make sure the temperature device has an idx below %s', aog.id)
modes_idx = desc.get('selector_modes_idx', None)
if modes_idx is not None:
aog.modes_idx = modes_idx
try:
aog.level = aogDevs[domains['selector'] + modes_idx].level
aog.selectorLevelName = aogDevs[domains['selector'] + modes_idx].selectorLevelName
aogDevs[domains['selector'] + modes_idx].domain = domains['merged'] + aog.id + ')'
except:
                    logger.error('Merge error: cannot find selector device with idx %s', modes_idx)
                    logger.error('Make sure the selector device has an idx below %s', aog.id)
if aog.domain in [domains['heater'], domains['kettle'], domains['waterheater'], domains['oven']]:
tc_idx = desc.get('merge_thermo_idx', None)
if tc_idx is not None:
aog.merge_thermo_idx = tc_idx
try:
aog.temp = aogDevs[domains['thermostat'] + tc_idx].state
aog.setpoint = aogDevs[domains['thermostat'] + tc_idx].setpoint
aogDevs[domains['thermostat'] + tc_idx].domain = domains['merged'] + aog.id + ')'
except:
                    logger.error('Merge error: cannot find thermostat device with idx %s', tc_idx)
                    logger.error('Make sure the thermostat device has an idx below %s', aog.id)
hide = desc.get('hide', False)
if hide:
aog.domain = domains['hidden']
if aog.domain in [domains['camera']]:
aog.report_state = False
if domains['light'] == aog.domain and "Dimmer" == device["SwitchType"]:
aog.attributes = ATTRS_BRIGHTNESS
if domains['fan'] == aog.domain and "Selector" == device["SwitchType"]:
aog.attributes = ATTRS_FANSPEED
if domains['outlet'] == aog.domain and "Dimmer" == device["SwitchType"]:
aog.attributes = ATTRS_BRIGHTNESS
if domains['color'] == aog.domain and "Dimmer" == device["SwitchType"]:
aog.attributes = ATTRS_BRIGHTNESS
if domains['color'] == aog.domain and device["SubType"] in ["RGBWW", "White"]:
aog.attributes = ATTRS_COLOR_TEMP
if domains['thermostat'] == aog.domain and "Thermostat" == device["Type"]:
aog.attributes = ATTRS_THERMSTATSETPOINT
if domains['blinds'] == aog.domain and "Blinds Percentage" == device["SwitchType"]:
aog.attributes = ATTRS_PERCENTAGE
if domains['blinds'] == aog.domain and "Blinds Percentage Inverted" == device["SwitchType"]:
aog.attributes = ATTRS_PERCENTAGE
if domains['vacuum'] == aog.domain and "Selector" == device["SwitchType"]:
aog.attributes = ATTRS_VACCUM_MODES
return aog
aogDevs = {}
deviceList = {}
def getDevices(devices="all", idx="0"):
global aogDevs
global deviceList
url = ""
if "all" == devices:
url = DOMOTICZ_URL + DOMOTICZ_GET_ALL_DEVICES_URL + configuration['Domoticz'][
'roomplan'] + '&filter=all&used=true'
elif "scene" == devices:
url = DOMOTICZ_URL + DOMOTICZ_GET_SCENES_URL
elif "id" == devices:
url = DOMOTICZ_URL + DOMOTICZ_GET_ONE_DEVICE_URL + idx
r = requests.get(url, auth=CREDITS)
if r.status_code == 200:
devs = r.json()['result']
for d in devs:
aog = getAog(d)
if aog is None:
continue
aogDevs[aog.entity_id] = aog
if 'loglevel' in configuration and (configuration['loglevel']).lower() == 'debug':
req = {aog.name: {}}
req[aog.name]['idx'] = int(aog.id)
req[aog.name]['type'] = aog.domain
req[aog.name]['state'] = aog.state
req[aog.name]['lastupdate'] = aog.lastupdate
if aog.nicknames is not None:
req[aog.name]['nicknames'] = aog.nicknames
if aog.modes_idx is not None:
req[aog.name]['modes_idx'] = aog.modes_idx
if aog.hide is not False:
req[aog.name]['hidden'] = aog.hide
if aog.actual_temp_idx is not None:
req[aog.name]['actual_temp_idx'] = aog.actual_temp_idx
if aog.merge_thermo_idx is not None:
req[aog.name]['merge_thermo_idx'] = aog.merge_thermo_idx
req[aog.name]['willReportState'] = aog.report_state
logger.debug(json.dumps(req, indent=2, sort_keys=False, ensure_ascii=False))
devlist = [(d.name, int(d.id), d.domain, d.state, d.room, d.nicknames, d.report_state) for d in aogDevs.values()]
devlist.sort(key=takeSecond)
deviceList = json.dumps(devlist)
def takeSecond(elem):
return elem[1]
def deep_update(target, source):
"""Update a nested dictionary with another nested dictionary."""
for key, value in source.items():
if isinstance(value, Mapping):
target[key] = deep_update(target.get(key, {}), value)
else:
target[key] = value
return target
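# Example of deep_update semantics (nested mappings are merged recursively,
# scalar values are overwritten):
#
#     >>> deep_update({'a': {'x': 1}, 'b': 2}, {'a': {'y': 3}, 'b': 4})
#     {'a': {'x': 1, 'y': 3}, 'b': 4}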
settings = {}
def getSettings():
"""Get domoticz settings."""
global settings
url = DOMOTICZ_URL + DOMOTICZ_GET_SETTINGS_URL
r = requests.get(url, auth=CREDITS)
if r.status_code == 200:
devs = r.json()
settings['SecPassword'] = devs['SecPassword']
settings["SecOnDelay"] = devs["SecOnDelay"]
settings['TempUnit'] = devs['TempUnit']
settings['Language'] = devs['Language']
getVersion()
logger.debug(json.dumps(settings, indent=2, sort_keys=False, ensure_ascii=False))
def getVersion():
"""Get domoticz settings."""
global settings
url = DOMOTICZ_URL + DOMOTICZ_GET_VERSION
r = requests.get(url, auth=CREDITS)
if r.status_code == 200:
vers = r.json()
settings['dzversion'] = vers['version']
settings['dzVents'] = vers['dzvents_version']
def restartServer():
"""Restart."""
logger.info(' ')
logger.info("Restart server")
logger.info(' ')
os.execv(sys.executable, ['python'] + sys.argv)
class _GoogleEntity:
"""Adaptation of Entity expressed in Google's terms."""
def __init__(self, state):
self.state = state
@property
def entity_id(self):
"""Return entity ID."""
return self.state.entity_id
def traits(self):
"""Return traits for entity."""
state = self.state
domain = state.domain
features = state.attributes
t = [Trait(state) for Trait in trait.TRAITS
if Trait.supported(domain, features)]
return t
def sync_serialize(self):
"""Serialize entity for a SYNC response.
https://developers.google.com/actions/smarthome/create-app#actiondevicessync
"""
state = self.state
enableReport = ReportState.enable_report_state()
traits = self.traits()
# Found no supported traits for this entity
if not traits:
return None
if enableReport:
reportState = state.report_state
else:
reportState = enableReport
device = {
'id': state.entity_id,
'name': {
'name': state.name
},
'attributes': {},
'traits': [trait.name for trait in traits],
'willReportState': reportState,
'deviceInfo': {
'manufacturer': "Domoticz",
"model": state.hardware
},
'type': DOMOTICZ_TO_GOOGLE_TYPES[state.domain],
}
# use aliases
aliases = state.nicknames
if aliases:
device['name']['nicknames'] = aliases
# add room hint if annotated
room = state.room
if room:
device['roomHint'] = room
for trt in traits:
device['attributes'].update(trt.sync_attributes())
return device
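    # For reference, a serialized SYNC device entry produced above looks like
    # this (values illustrative):
    #
    #     {
    #         'id': 'Light123',
    #         'name': {'name': 'Kitchen light', 'nicknames': ['cooking light']},
    #         'attributes': {},  # filled from each trait's sync_attributes()
    #         'traits': ['action.devices.traits.OnOff'],
    #         'willReportState': True,
    #         'deviceInfo': {'manufacturer': 'Domoticz', 'model': '...'},
    #         'type': 'action.devices.types.LIGHT',
    #         'roomHint': 'Kitchen',
    #     }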
def query_serialize(self):
"""Serialize entity for a QUERY response.
https://developers.google.com/actions/smarthome/create-app#actiondevicesquery
"""
state = self.state
# if state.state == STATE_UNAVAILABLE:
# return {'online': False}
attrs = {'online': True}
for trt in self.traits():
deep_update(attrs, trt.query_attributes())
return attrs
def execute(self, command, params, challenge):
"""Execute a command.
https://developers.google.com/actions/smarthome/create-app#actiondevicesexecute
"""
executed = False
for trt in self.traits():
if trt.can_execute(command, params):
ack = self.state.ack # ack is now stored in state
pin = False
if configuration['Domoticz']['switchProtectionPass']:
protect = self.state.protected
else:
protect = False
if protect or self.state.domain == domains['security']:
pin = configuration['Domoticz']['switchProtectionPass']
if self.state.domain == domains['security']:
pin = self.state.seccode
ack = False
if challenge is None:
raise SmartHomeErrorNoChallenge(ERR_CHALLENGE_NEEDED, 'pinNeeded',
'Unable to execute {} for {} - challenge needed '.format(
command, self.state.entity_id))
elif not challenge.get('pin', False):
raise SmartHomeErrorNoChallenge(ERR_CHALLENGE_NEEDED, 'userCancelled',
'Unable to execute {} for {} - challenge needed '.format(
command, self.state.entity_id))
                    elif protect and pin != challenge.get('pin'):
raise SmartHomeErrorNoChallenge(ERR_CHALLENGE_NEEDED, 'challengeFailedPinNeeded',
'Unable to execute {} for {} - challenge needed '.format(
command, self.state.entity_id))
elif self.state.domain == domains['security'] and pin != hashlib.md5(
str.encode(challenge.get('pin'))).hexdigest():
raise SmartHomeErrorNoChallenge(ERR_CHALLENGE_NEEDED, 'challengeFailedPinNeeded',
'Unable to execute {} for {} - challenge needed '.format(
command, self.state.entity_id))
if ack:
if challenge is None:
raise SmartHomeErrorNoChallenge(ERR_CHALLENGE_NEEDED, 'ackNeeded',
'Unable to execute {} for {} - challenge needed '.format(
command, self.state.entity_id))
elif not challenge.get('ack', False):
raise SmartHomeErrorNoChallenge(ERR_CHALLENGE_NEEDED, 'userCancelled',
'Unable to execute {} for {} - challenge needed '.format(
command, self.state.entity_id))
trt.execute(command, params)
executed = True
break
if not executed:
raise SmartHomeError(ERR_FUNCTION_NOT_SUPPORTED,
'Unable to execute {} for {}'.format(command, self.state.entity_id))
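    # Challenge handling above, in summary: protected devices require a PIN
    # challenge ('pinNeeded' / 'challengeFailedPinNeeded' until the matching
    # pin arrives); security panels compare the pin against the md5 hash of
    # the Domoticz security code; devices flagged with ack instead require a
    # two-step confirmation ('ackNeeded' until the user confirms).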
def async_update(self):
"""Update the entity with latest info from Domoticz."""
if self.state.domain == domains['group'] or self.state.domain == domains['scene']:
getDevices('scene')
else:
getDevices('id', self.state.id)
class SmartHomeReqHandler(OAuthReqHandler):
global smarthomeControlMappings
global aogDevs
def __init__(self, *args, **kwargs):
super(SmartHomeReqHandler, self).__init__(*args, **kwargs)
self._request_id = None
def report_state(self, states, token):
"""Send a state report to Google."""
data = {
'requestId': self._request_id,
'agentUserId': token.get('userAgentId', None),
'payload': {
'devices': {
'states': states,
}
}
}
ReportState.call_homegraph_api(REPORT_STATE_BASE_URL, data)
def smarthome_process(self, message, token):
request_id = self._request_id # type: str
inputs = message.get('inputs') # type: list
if len(inputs) != 1:
return {
'requestId': request_id,
'payload': {'errorCode': ERR_PROTOCOL_ERROR}
}
handler = smarthomeControlMappings.get(inputs[0].get('intent'))
if handler is None:
return {'requestId': request_id, 'payload': {'errorCode': ERR_PROTOCOL_ERROR}}
try:
result = handler(self, inputs[0].get('payload'), token)
return {'requestId': request_id, 'payload': result}
except SmartHomeError as err:
return {'requestId': request_id, 'payload': {'errorCode': err.code}}
except Exception as e:
logger.error(e)
return {'requestId': request_id, 'payload': {'errorCode': ERR_UNKNOWN_ERROR}}
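    # For reference, the fulfillment requests routed above follow the
    # documented Google smart home intent shape; e.g. a SYNC request body:
    #
    #     {
    #         "requestId": "ff36a3cc-ec34-11e6-b1a0-64510650abcf",
    #         "inputs": [{"intent": "action.devices.SYNC"}]
    #     }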
def smarthome_post(self, s):
logger.debug(s.headers)
a = s.headers.get('Authorization', None)
token = None
if a is not None:
types, tokenH = a.split()
if types.lower() == 'bearer':
token = Auth['tokens'].get(tokenH, None)
if token is None:
            raise SmartHomeError(ERR_PROTOCOL_ERROR, 'not authorized access')
message = json.loads(s.body)
self._request_id = message.get('requestId')
logger.info("Request " + json.dumps(message, indent=2, sort_keys=True, ensure_ascii=False))
response = self.smarthome_process(message, token)
        if 'errorCode' in response.get('payload', {}):
            logger.error('Error handling message %s: %s', message, response['payload'])
s.send_json(200, json.dumps(response, ensure_ascii=False).encode('utf-8'), True)
def smarthome(self, s):
s.send_message(500, "not supported")
def forceDevicesSync(self):
userAgent = self.getUserAgent()
enableReport = ReportState.enable_report_state()
if userAgent is None:
return 500 # internal error
data = {"agentUserId": userAgent}
if enableReport:
r = ReportState.call_homegraph_api(REQUEST_SYNC_BASE_URL, data)
elif 'Homegraph_API_Key' in configuration and configuration['Homegraph_API_Key'] != 'ADD_YOUR HOMEGRAPH_API_KEY_HERE':
r = ReportState.call_homegraph_api_key(REQUEST_SYNC_BASE_URL, data)
else:
logger.error("No configuration for request_sync available")
return r
def syncDevices(self, s):
user = self.getSessionUser()
if user is None or user.get('uid', '') == '':
s.redirect('login?redirect_uri={0}'.format('sync'))
return
r = self.forceDevicesSync()
s.send_message(200, 'Synchronization request sent, status_code: ' + str(r))
def restartServer(self, s):
user = self.getSessionUser()
if user is None or user.get('uid', '') == '':
s.redirect('login?redirect_uri={0}'.format('restart'))
return
s.send_message(200, 'Restart request sent, status_code: True')
restartServer()
def settings(self, s):
user = self.getSessionUser()
if user is None or user.get('uid', '') == '':
s.redirect('login?redirect_uri={0}'.format('settings'))
return
update = checkupdate()
confJSON = json.dumps(configuration)
public_url = getTunnelUrl()
message = ''
meta = '<!-- <meta http-equiv="refresh" content="5"> -->'
code = readFile(os.path.join(FILE_DIR, CONFIGFILE))
logs = readFile(os.path.join(logfilepath, LOGFILE))
template = TEMPLATE.format(message=message, uptime=uptime(), list=deviceList, meta=meta, code=code,
conf=confJSON, public_url=public_url, logs=logs, update=update,
branch=repo.active_branch.name, dzversion=settings['dzversion'])
s.send_message(200, template)
def settings_post(self, s):
enableReport = ReportState.enable_report_state()
update = checkupdate()
confJSON = json.dumps(configuration)
public_url = getTunnelUrl()
logs = readFile(os.path.join(logfilepath, LOGFILE))
code = readFile(os.path.join(FILE_DIR, CONFIGFILE))
meta = '<!-- <meta http-equiv="refresh" content="5"> -->'
if s.form.get("save"):
textToSave = s.form.get("save", None)
codeToSave = textToSave.replace("+", " ")
saveFile(CONFIGFILE, codeToSave)
message = 'Config saved'
logger.info(message)
logs = readFile(os.path.join(logfilepath, LOGFILE))
code = readFile(os.path.join(FILE_DIR, CONFIGFILE))
template = TEMPLATE.format(message=message, uptime=uptime(), list=deviceList, meta=meta, code=code,
conf=confJSON, public_url=public_url, logs=logs, update=update,
branch=repo.active_branch.name, dzversion=settings['dzversion'])
s.send_message(200, template)
if s.form.get("backup"):
codeToSave = readFile(os.path.join(FILE_DIR, CONFIGFILE))
saveFile('config/config.yaml.bak', codeToSave)
message = 'Backup saved'
logger.info(message)
logs = readFile(os.path.join(logfilepath, LOGFILE))
template = TEMPLATE.format(message=message, uptime=uptime(), list=deviceList, meta=meta, code=code,
conf=confJSON, public_url=public_url, logs=logs, update=update,
branch=repo.active_branch.name, dzversion=settings['dzversion'])
s.send_message(200, template)
if s.form.get("restart"):
message = 'Restart Server, please wait a minute!'
meta = '<meta http-equiv="refresh" content="20">'
code = ''
logs = ''
template = TEMPLATE.format(message=message, uptime=uptime(), list=deviceList, meta=meta, code=code,
conf=confJSON, public_url=public_url, logs=logs, update=update,
branch=repo.active_branch.name, dzversion=settings['dzversion'])
s.send_message(200, template)
restartServer()
if s.form.get("sync"):
            if ('Homegraph_API_Key' in configuration and configuration['Homegraph_API_Key'] != 'ADD_YOUR HOMEGRAPH_API_KEY_HERE') or enableReport:
r = self.forceDevicesSync()
time.sleep(0.5)
if r:
                    message = 'Devices synchronized'
else:
message = 'Homegraph api key not valid!'
else:
message = 'Add Homegraph api key or a Homegraph Service Account json file to sync devices here!'
logs = readFile(os.path.join(logfilepath, LOGFILE))
template = TEMPLATE.format(message=message, uptime=uptime(), list=deviceList, meta=meta, code=code,
conf=confJSON, public_url=public_url, logs=logs, update=update,
branch=repo.active_branch.name, dzversion=settings['dzversion'])
s.send_message(200, template)
if s.form.get("reload"):
message = ''
template = TEMPLATE.format(message=message, uptime=uptime(), list=deviceList, meta=meta, code=code,
conf=confJSON, public_url=public_url, logs=logs, update=update,
branch=repo.active_branch.name, dzversion=settings['dzversion'])
s.send_message(200, template)
if s.form.get("deletelogs"):
logfile = os.path.join(logfilepath, LOGFILE)
if os.path.exists(logfile):
f = open(logfile, 'w')
f.close()
logger.info('Logs removed by user')
message = 'Logs removed'
logs = readFile(os.path.join(logfilepath, LOGFILE))
template = TEMPLATE.format(message=message, uptime=uptime(), list=deviceList, meta=meta, code=code,
conf=confJSON, public_url=public_url, logs=logs, update=update,
branch=repo.active_branch.name, dzversion=settings['dzversion'])
s.send_message(200, template)
if s.form.get("update"):
repo.git.reset('--hard')
repo.remotes.origin.pull()
message = 'Updating to latest ' + repo.active_branch.name + ', please wait a minute!'
meta = '<meta http-equiv="refresh" content="20">'
template = TEMPLATE.format(message=message, uptime=uptime(), list=deviceList, meta=meta, code=code,
conf=confJSON, public_url=public_url, logs=logs, update=update,
branch=repo.active_branch.name, dzversion=settings['dzversion'])
s.send_message(200, template)
            subprocess.call(['pip', 'install', '-r', os.path.join(FILE_DIR, 'requirements/pip-requirements.txt')])
restartServer()
def delay_report_state(self, states, token):
time.sleep(3)
self.report_state(states, token)
def smarthome_sync(self, payload, token):
"""Handle action.devices.SYNC request.
https://developers.google.com/actions/smarthome/create-app#actiondevicessync
"""
devices = []
states = {}
aogDevs.clear()
getDevices() # sync all devices
getSettings()
enableReport = ReportState.enable_report_state()
for state in aogDevs.values():
entity = _GoogleEntity(state)
serialized = entity.sync_serialize()
if serialized is None:
continue
devices.append(serialized)
if state.report_state:
try:
states[entity.entity_id] = entity.query_serialize()
                except Exception:
                    continue
if enableReport:
            threading.Thread(target=self.delay_report_state, args=(states, token)).start()
return {
'agentUserId': token.get('userAgentId', None),
'devices': devices,
}
def smarthome_query(self, payload, token):
"""Handle action.devices.QUERY request.
https://developers.google.com/actions/smarthome/create-app#actiondevicesquery
"""
enableReport = ReportState.enable_report_state()
response = {}
        devices = {}
        state = None  # tracks the last device processed in the loop below
getDevices()
for device in payload.get('devices', []):
devid = device['id']
#_GoogleEntity(aogDevs.get(devid, None)).async_update()
state = aogDevs.get(devid, None)
if not state:
# If we can't find a state, the device is offline
devices[devid] = {'online': False}
continue
e = _GoogleEntity(state)
devices[devid] = e.query_serialize()
response = {'devices': devices}
logger.info("Response " + json.dumps(response, indent=2, sort_keys=True, ensure_ascii=False))
        if enableReport and devices and state is not None and state.report_state:
            self.report_state(devices, token)
return {'devices': devices}
def smarthome_exec(self, payload, token):
"""Handle action.devices.EXECUTE request.
https://developers.google.com/actions/smarthome/create-app#actiondevicesexecute
"""
entities = {}
results = {}
for command in payload['commands']:
for device, execution in product(command['devices'],
command['execution']):
entity_id = device['id']
# Happens if error occurred. Skip entity for further processing
if entity_id in results:
continue
if entity_id not in entities:
if len(aogDevs) == 0:
getDevices()
getSettings()
state = aogDevs.get(entity_id, None)
if state is None:
results[entity_id] = {'ids': [entity_id], 'status': 'ERROR', 'errorCode': ERR_DEVICE_OFFLINE}
continue
entities[entity_id] = _GoogleEntity(state)
try:
entities[entity_id].execute(execution['command'], execution.get('params', {}),
execution.get('challenge', None))
except SmartHomeError as err:
results[entity_id] = {'ids': [entity_id], 'status': 'ERROR', 'errorCode': err.code}
logger.error(err)
except SmartHomeErrorNoChallenge as err:
results[entity_id] = {'ids': [entity_id], 'status': 'ERROR', 'errorCode': err.code,
'challengeNeeded': {'type': err.desc}}
logger.error(err)
final_results = list(results.values())
for entity in entities.values():
if entity.entity_id in results:
continue
entity.async_update()
final_results.append({'ids': [entity.entity_id], 'status': 'SUCCESS', 'states': entity.query_serialize()})
return {'commands': final_results}
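    # For reference, a successful entry in the EXECUTE response assembled
    # above looks like this (values illustrative):
    #
    #     {'ids': ['Light123'], 'status': 'SUCCESS',
    #      'states': {'online': True, 'on': True}}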
def smarthome_disconnect(self, payload, token):
"""Handle action.devices.DISCONNECT request.
https://developers.google.com/assistant/smarthome/develop/process-intents#DISCONNECT
"""
return None
if 'userinterface' in configuration and configuration['userinterface'] == True:
smarthomeGetMappings = {"/smarthome": SmartHomeReqHandler.smarthome,
"/sync": SmartHomeReqHandler.syncDevices,
"/settings": SmartHomeReqHandler.settings,
"/restart": SmartHomeReqHandler.restartServer}
smarthomePostMappings = {"/smarthome": SmartHomeReqHandler.smarthome_post,
"/settings": SmartHomeReqHandler.settings_post}
else:
smarthomeGetMappings = {"/smarthome": SmartHomeReqHandler.smarthome,
"/sync": SmartHomeReqHandler.syncDevices,
"/restart": SmartHomeReqHandler.restartServer}
smarthomePostMappings = {"/smarthome": SmartHomeReqHandler.smarthome_post}
smarthomeControlMappings = {'action.devices.SYNC': SmartHomeReqHandler.smarthome_sync,
'action.devices.QUERY': SmartHomeReqHandler.smarthome_query,
'action.devices.EXECUTE': SmartHomeReqHandler.smarthome_exec,
'action.devices.DISCONNECT': SmartHomeReqHandler.smarthome_disconnect}
avg_line_length: 42.374866 | max_line_length: 253 | alphanum_fraction: 0.568934

hexsha: 6c513039f259aac405df0c2c0c72b73097582a38 | size: 7,911 | ext: py | lang: Python
stars: tests/test_security_oauth2.py @ dmig/fastapi (head 497e5e6257a282162a435b4d37f82d567fe73195, ["MIT"]) | count: null | events: null .. null
issues: tests/test_security_oauth2.py @ dmig/fastapi (head 497e5e6257a282162a435b4d37f82d567fe73195, ["MIT"]) | count: 1 | events: 2021-07-24T15:25:13.000Z .. 2021-07-24T15:25:13.000Z
forks: tests/test_security_oauth2.py @ dmig/fastapi (head 497e5e6257a282162a435b4d37f82d567fe73195, ["MIT"]) | count: 1 | events: 2020-04-19T17:43:43.000Z .. 2020-04-19T17:43:43.000Z
import pytest
from fastapi import Depends, FastAPI, Security
from fastapi.security import OAuth2, OAuth2PasswordRequestFormStrict
from fastapi.testclient import TestClient
from pydantic import BaseModel
app = FastAPI()
reusable_oauth2 = OAuth2(
flows={
"password": {
"tokenUrl": "/token",
"scopes": {"read:users": "Read the users", "write:users": "Create users"},
}
}
)
class User(BaseModel):
username: str
# Here we use string annotations to test them
def get_current_user(oauth_header: "str" = Security(reusable_oauth2)):
user = User(username=oauth_header)
return user
@app.post("/login")
# Here we use string annotations to test them
def read_current_user(form_data: "OAuth2PasswordRequestFormStrict" = Depends()):
return form_data
@app.get("/users/me")
# Here we use string annotations to test them
def read_current_user(current_user: "User" = Depends(get_current_user)):
return current_user
client = TestClient(app)
openapi_schema = {
"openapi": "3.0.2",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/login": {
"post": {
"responses": {
"200": {
"description": "Successful Response",
"content": {"application/json": {"schema": {}}},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
"summary": "Read Current User",
"operationId": "read_current_user_login_post",
"requestBody": {
"content": {
"application/x-www-form-urlencoded": {
"schema": {
"$ref": "#/components/schemas/Body_read_current_user_login_post"
}
}
},
"required": True,
},
}
},
"/users/me": {
"get": {
"responses": {
"200": {
"description": "Successful Response",
"content": {"application/json": {"schema": {}}},
}
},
"summary": "Read Current User",
"operationId": "read_current_user_users_me_get",
"security": [{"OAuth2": []}],
}
},
},
"components": {
"schemas": {
"Body_read_current_user_login_post": {
"title": "Body_read_current_user_login_post",
"required": ["grant_type", "username", "password"],
"type": "object",
"properties": {
"grant_type": {
"title": "Grant Type",
"pattern": "password",
"type": "string",
},
"username": {"title": "Username", "type": "string"},
"password": {"title": "Password", "type": "string"},
"scope": {"title": "Scope", "type": "string", "default": ""},
"client_id": {"title": "Client Id", "type": "string"},
"client_secret": {"title": "Client Secret", "type": "string"},
},
},
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {"type": "string"},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
},
"securitySchemes": {
"OAuth2": {
"type": "oauth2",
"flows": {
"password": {
"scopes": {
"read:users": "Read the users",
"write:users": "Create users",
},
"tokenUrl": "/token",
}
},
}
},
},
}
def test_openapi_schema():
response = client.get("/openapi.json")
assert response.status_code == 200, response.text
assert response.json() == openapi_schema
def test_security_oauth2():
response = client.get("/users/me", headers={"Authorization": "Bearer footokenbar"})
assert response.status_code == 200, response.text
assert response.json() == {"username": "Bearer footokenbar"}
def test_security_oauth2_password_other_header():
response = client.get("/users/me", headers={"Authorization": "Other footokenbar"})
assert response.status_code == 200, response.text
assert response.json() == {"username": "Other footokenbar"}
def test_security_oauth2_password_bearer_no_header():
response = client.get("/users/me")
assert response.status_code == 403, response.text
assert response.json() == {"detail": "Not authenticated"}
required_params = {
"detail": [
{
"loc": ["body", "grant_type"],
"msg": "field required",
"type": "value_error.missing",
},
{
"loc": ["body", "username"],
"msg": "field required",
"type": "value_error.missing",
},
{
"loc": ["body", "password"],
"msg": "field required",
"type": "value_error.missing",
},
]
}
grant_type_required = {
"detail": [
{
"loc": ["body", "grant_type"],
"msg": "field required",
"type": "value_error.missing",
}
]
}
grant_type_incorrect = {
"detail": [
{
"loc": ["body", "grant_type"],
"msg": 'string does not match regex "password"',
"type": "value_error.str.regex",
"ctx": {"pattern": "password"},
}
]
}
@pytest.mark.parametrize(
"data,expected_status,expected_response",
[
(None, 422, required_params),
({"username": "johndoe", "password": "secret"}, 422, grant_type_required),
(
{"username": "johndoe", "password": "secret", "grant_type": "incorrect"},
422,
grant_type_incorrect,
),
(
{"username": "johndoe", "password": "secret", "grant_type": "password"},
200,
{
"grant_type": "password",
"username": "johndoe",
"password": "secret",
"scopes": [],
"client_id": None,
"client_secret": None,
},
),
],
)
def test_strict_login(data, expected_status, expected_response):
response = client.post("/login", data=data)
assert response.status_code == expected_status
assert response.json() == expected_response
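# A minimal way to run this test module (assuming pytest and the test client
# dependency, requests, are installed):
#
#     pytest tests/test_security_oauth2.py -q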
avg_line_length: 31.644 | max_line_length: 96 | alphanum_fraction: 0.447857

hexsha: 4cf03d4c7c5f7a5d4e2a37a112027954a7e6b867 | size: 45 | ext: py | lang: Python
stars: pycoin/tx/TxOut.py @ genitrust/pycoin (head b0daefdd69fa0400cc48ae16f923f03d366db7b4, ["MIT"]) | count: null | events: null .. null
issues: pycoin/tx/TxOut.py @ genitrust/pycoin (head b0daefdd69fa0400cc48ae16f923f03d366db7b4, ["MIT"]) | count: null | events: null .. null
forks: pycoin/tx/TxOut.py @ genitrust/pycoin (head b0daefdd69fa0400cc48ae16f923f03d366db7b4, ["MIT"]) | count: 1 | events: 2020-05-20T09:53:27.000Z .. 2020-05-20T09:53:27.000Z
from pycoin.coins.bitcoin.TxOut import TxOut
avg_line_length: 22.5 | max_line_length: 44 | alphanum_fraction: 0.844444

hexsha: 23c3b84b641859eaaf7fb8053426b35186e7c0f4 | size: 24,437 | ext: py | lang: Python
stars: cvpysdk/metricsreport.py @ kevinjojeejoseph/cvpysdk (head 29d82fe6e8f552940534c508cfdbf2c14d16cde3, ["Apache-2.0"]) | count: 21 | events: 2020-09-09T07:26:05.000Z .. 2022-02-27T19:05:42.000Z
issues: cvpysdk/metricsreport.py @ kevinjojeejoseph/cvpysdk (head 29d82fe6e8f552940534c508cfdbf2c14d16cde3, ["Apache-2.0"]) | count: 43 | events: 2020-07-29T07:27:21.000Z .. 2022-01-31T12:44:19.000Z
forks: cvpysdk/metricsreport.py @ kevinjojeejoseph/cvpysdk (head 29d82fe6e8f552940534c508cfdbf2c14d16cde3, ["Apache-2.0"]) | count: 14 | events: 2020-08-12T16:43:30.000Z .. 2021-12-06T01:59:41.000Z
# -*- coding: utf-8 -*-
# --------------------------------------------------------------------------
# Copyright Commvault Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# --------------------------------------------------------------------------
"""File for performing Metrics operations.
_Metrics : Class for representing all common operations on Metrics Reporting
PrivateMetrics : Class for representing Private Metrics and performing operations on it.
CloudMetrics : Class for representing Cloud (public) Metrics and performing operations on it.
use method save_config() or upload_now() to save the updated configurations.
Metrics:
__init__(Commcell_object, isprivate)-- initialise with object of CommCell and flag to
specify metrics type
__repr__() -- returns the string to represent the instance of the
Metrics class
enable_health() -- enables Health service
disable_health() -- disables Health service
enable_activity() -- enables Activity service
disable_activity() -- disables Activity service
enable_audit() -- enables Audit service
disable_audit() -- disables Audit service
disable_chargeback() -- disables Chargeback service
enable_post_upgrade_check() -- enables enable_post_upgrade_check Service
enable_all_services() -- enables all services in metrics
disable_all_services() -- disables all services
enable_metrics() -- enables Metrics Service
disable_metrics() -- disables Metrics Service in CommServe
set_upload_freq() -- updates the upload frequency
set_data_collection_window -- updates the data collection window
remove_data_collection_window-- removes data collection window
set_all_clientgroup() -- updates metrics configuration with all client groups
set_clientgroups() -- sets the client groups for metrics
save_config() -- updates the configuration of Metrics, this must be
called to save the configuration changes made in this object
upload_now() -- Performs Upload Now operation of metrics
wait_for_download_completion()-- waits for metrics download operation to complete
wait_for_collection_completion-- waits for metrics collection operation to complete
wait_for_upload_completion() -- waits for metrics upload operation to complete
wait_for_uploadnow_completion()-- waits for complete metrics operation to complete
get_uploaded_filename() -- gives the name of the last uploaded file
refresh() -- refresh the properties and config of the Metrics Server
PrivateMetrics:
__init__(Commcell_object) -- initialise with object of CommCell
update_url(hostname) -- Updates Metrics URL for download and upload
enable_chargeback(daily, weekly, monthly)
-- enables chargeback service
CloudMetrics:
__init__(Commcell_object) -- initialise with object of CommCell
enable_chargeback() -- enables chargeback service
enable_upgrade_readiness() -- Enables pre upgrade readiness service
disable_upgrade_readiness() -- disables pre upgrade readiness service
enable_proactive_support() -- Enables Proactive Support service
disable_proactive_support() -- disables Proactive Support service
enable_cloud_assist() -- Enables Cloud Assist service
disable_cloud_assist() -- disables Cloud Assist service
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from time import sleep
from urllib.parse import urlparse
from cvpysdk.license import LicenseDetails
from .exception import SDKException
class _Metrics(object):
"""Class for common operations in Metrics reporting
this will be inherited by Private and Cloud metrics"""
def __init__(self, commcell_object, isprivate):
self._commcell_object = commcell_object
self._isprivate = isprivate
self._METRICS = self._commcell_object._services['METRICS']
self._GET_METRICS = self._commcell_object._services['GET_METRICS'] % self._isprivate
self._enable_service = True
self._disable_service = False
self._get_metrics_config()
def __repr__(self):
"""Representation string for the instance of the UserGroups class."""
if self._isprivate == 1:
metrics_type = 'Private'
else:
metrics_type = 'Public'
return "{0} Metrics class instance for Commcell: '{1}' with config '{2}'".format(
metrics_type,
self._commcell_object.commserv_name,
self._metrics_config
)
def _get_metrics_config(self):
flag, response = self._commcell_object._cvpysdk_object.make_request(
'GET', self._GET_METRICS
)
if flag:
self._metrics_config = response.json()
self._metrics_config.update({'isPrivateCloud': bool(self._isprivate == 1)})
if self._metrics_config and 'config' in self._metrics_config:
# get services
self.services = {}
self._cloud = self._metrics_config['config']['cloud']
self._service_list = self._cloud['serviceList']
for service in self._service_list:
service_name = service['service']['name']
status = service['enabled']
self.services[service_name] = status
else:
raise SDKException('Response', '102')
else:
raise SDKException('Response', '101', response.text)
def refresh(self):
"""updates metrics object with the latest configuration"""
self._get_metrics_config()
def _update_service_state(self, service_name, state):
for idx, service in enumerate(self._service_list):
if service['service']['name'] == service_name:
self._service_list[idx]['enabled'] = state
self.services[service_name] = state
@property
def lastdownloadtime(self):
"""Returns last download time in unix time format"""
return self._metrics_config['config']['scriptDownloadTime']
@property
def lastcollectiontime(self):
"""Returns last collection time in unix time format"""
return self._metrics_config['config']['lastCollectionTime']
@property
def lastuploadtime(self):
"""Returns last upload time in unix time format"""
return self._metrics_config['config']['lastUploadTime']
@property
def nextuploadtime(self):
"""Returns last Next time in unix time format"""
return self._metrics_config['config']['nextUploadTime']
@property
def uploadfrequency(self):
"""Returns last Next time in unix time format"""
return self._metrics_config['config']['uploadFrequency']
def enable_health(self):
"""enables Health Service"""
if self.services['Health Check'] is not True:
self._update_service_state('Health Check', self._enable_service)
def disable_health(self):
"""disables Health Service"""
if self.services['Health Check'] is True:
self._update_service_state('Health Check', self._disable_service)
def enable_activity(self):
"""enables Activity Service"""
if self.services['Activity'] is not True:
self._update_service_state('Activity', self._enable_service)
def disable_activity(self):
"""disables Activity Service"""
if self.services['Activity'] is True:
self._update_service_state('Activity', self._disable_service)
def enable_audit(self):
"""enables Audit Service"""
if self.services['Audit'] is not True:
self._update_service_state('Audit', self._enable_service)
def disable_audit(self):
"""disables Audit Service"""
if self.services['Audit'] is True:
self._update_service_state('Audit', self._disable_service)
def enable_post_upgrade_check(self):
"""enables post_upgrade_check Service"""
if self.services['Post Upgrade Check'] is not True:
self._update_service_state('Post Upgrade Check', self._enable_service)
    def disable_post_upgrade_check(self):
        """disables post_upgrade_check Service"""
        if self.services['Post Upgrade Check'] is True:
            self._update_service_state('Post Upgrade Check', self._disable_service)
    def disable_chargeback(self):
        """disables Chargeback Service"""
        if self.services['Charge Back'] is True:
            self._update_service_state('Charge Back', self._disable_service)
def enable_all_services(self):
"""enables All Service"""
for index, service in enumerate(self._service_list):
if service['service']['name'] != 'Post Upgrade Check':
self._service_list[index]['enabled'] = self._enable_service
service_name = service['service']['name']
self.services[service_name] = self._enable_service
def disable_all_services(self):
"""disables All Service"""
for index, service in enumerate(self._service_list):
if service['service']['name'] != 'Post Upgrade Check':
self._service_list[index]['enabled'] = self._disable_service
service_name = service['service']['name']
self.services[service_name] = self._disable_service
def set_upload_freq(self, days=1):
"""
updates the upload frequency
Args:
days (int): number of days for upload frequency, value can be between 1 to 7
Raises:
SDKException:
if invalid days supplied for upload frequency
"""
        if days < 1 or days > 7:
            raise SDKException('Metrics', '101', 'Invalid upload frequency supplied; must be between 1 and 7 days')
self._metrics_config['config']['uploadFrequency'] = days
def set_data_collection_window(self, seconds=28800):
"""
updates the data collection window
Args:
seconds: number for seconds after 12 AM
e.g.; 28800 for 8 AM
default; 28800
Raises:
SDKException:
if window specified is below 12.05 am
"""
if seconds < 300: # minimum 5 minutes after 12 midnight
raise SDKException('Metrics', '101', 'Data collection window should be above 12.05 AM')
self._metrics_config['config']['dataCollectionTime'] = seconds
def remove_data_collection_window(self):
"""removes data collection window"""
self._metrics_config['config']['dataCollectionTime'] = -1
def set_all_clientgroups(self):
"""updates metrics configuration with all client groups"""
# sets the list to one row with client group id as -1
self._metrics_config['config']['clientGroupList'] = [{'_type_': 28, 'clientGroupId': -1}]
def set_clientgroups(self, clientgroup_name=None):
"""
sets the client groups for metrics
Args:
clientgroup_name (list): list of client group names, None is set all client groups
will be enabled.
"""
if clientgroup_name is None:
self.set_all_clientgroups()
else:
self._metrics_config['config']['clientGroupList'] = []
clientgroup = self._metrics_config['config']['clientGroupList']
for each_client_grp in clientgroup_name:
cg_id = self._commcell_object.client_groups.get(each_client_grp).clientgroup_id
clientgroup.append(
{'_type_': 28, 'clientGroupId': int(cg_id), 'clientGroupName': each_client_grp}
)
def enable_metrics(self):
"""enables Metrics in CommServe"""
self._metrics_config['config']['commcellDiagUsage'] = self._enable_service
def disable_metrics(self):
"""disables Metrics in CommServe"""
self._metrics_config['config']['commcellDiagUsage'] = self._disable_service
def save_config(self):
"""
updates the configuration of Metrics
this must be called to save the configuration changes made in this object
Raises:
SDKException:
if response is not success
"""
flag, response = self._commcell_object._cvpysdk_object.make_request(
'POST', self._METRICS, self._metrics_config
)
if not flag:
raise SDKException('Response', '101', response.text)
def upload_now(self):
"""
Performs Upload Now operation of metrics
Raises:
SDKException:
if response is not success:
"""
self._metrics_config['config']['uploadNow'] = 1
flag, response = self._commcell_object._cvpysdk_object.make_request(
'POST', self._METRICS, self._metrics_config
)
if not flag:
raise SDKException('Response', '101', response.text)
# reset upload now flag
self._metrics_config['config']['uploadNow'] = 0
def wait_for_download_completion(self, timeout=300):
"""
        Waits for Metrics script download to complete for maximum of seconds given in timeout
        Args:
            timeout (int): maximum seconds to wait
        Raises: TimeoutError if download didn't complete within timeout period
        """
self.refresh()
time_limit = timeout
while time_limit > 0:
if self.lastdownloadtime > 0:
return True
else:
sleep(30)
time_limit -= 30
self.refresh()
raise TimeoutError(
"Download process didn't complete after {0} seconds".format(timeout))
def wait_for_collection_completion(self, timeout=400):
"""
Waits for Metrics collection to complete for maximum of seconds given in timeout
Args:
timeout (int): maximum seconds to wait
        Raises: TimeoutError if collection didn't complete within timeout period
"""
self.refresh()
timelimit = timeout
while timelimit > 0:
if self.lastcollectiontime > 0:
return True
else:
sleep(30)
timelimit -= 30
self.refresh()
raise TimeoutError("Collection process didn't complete after {0} seconds".format(timeout))
def wait_for_upload_completion(self, timeout=120):
"""
Waits for Metrics upload to complete for maximum of seconds given in timeout
Args:
timeout (int): maximum seconds to wait
        Raises: TimeoutError if upload didn't complete within timeout period
"""
self.refresh()
timelimit = timeout
while timelimit > 0:
if self.lastuploadtime >= self.lastcollectiontime and self.lastuploadtime > 0:
return True
else:
sleep(30)
timelimit -= 30
self.refresh()
raise TimeoutError("Upload process didn't complete after {0} seconds".format(timeout))
def wait_for_uploadnow_completion(self,
download_timeout=300,
collection_timeout=400,
upload_timeout=120):
"""
Waits for Metrics uploadNow operation to complete, checks both collection and upload
Args:
download_timeout (int): maximum seconds to wait for download
collection_timeout (int): maximum seconds to wait for collection
upload_timeout (int): maximum seconds to wait for upload
        Raises: TimeoutError if uploadNow operation didn't complete
"""
self.wait_for_download_completion(download_timeout)
self.wait_for_collection_completion(collection_timeout)
self.wait_for_upload_completion(upload_timeout)
def _get_commcell_id(self):
"""returns the hexadecimal value of commcell id"""
license_details = LicenseDetails(self._commcell_object)
ccid = license_details.commcell_id
if ccid == -1:
commcellid = 'FFFFF'
else:
commcellid = hex(ccid).split('x')[1].upper()
return commcellid
def get_uploaded_filename(self, query_id=None):
"""
Gets last uploaded file name
Args:
query_id (int): optional argument to get file name specific to a query
Returns : Last uploaded file name
"""
commcellid = self._get_commcell_id()
cs_lastcollectiontime = int(self.lastcollectiontime)
if cs_lastcollectiontime == 0:
raise Exception("last collection time is 0, Upload didn't complete or failed")
if query_id is None:
file_name = "CSS" + "" + str(cs_lastcollectiontime) + "_" + str(commcellid) + ".xml"
else:
file_name = "CSS" + "" + str(cs_lastcollectiontime) + "_" + str(
commcellid) + "_" + str(query_id) + ".xml"
return file_name
class PrivateMetrics(_Metrics):
"""Class for operations in private Metrics reporting"""
def __init__(self, commcell_object):
"""Initialize object of the UserGroups class.
Args:
commcell_object (object) -- instance of the Commcell class
type -- 1 for private, 0 for public
Returns:
object - instance of the UserGroups class
"""
_Metrics.__init__(self, commcell_object, isprivate=True)
def _update_private_download_url(self, hostname, port, protocol):
self._cloud['downloadURL'] = '{0}://{1}:{2}/downloads/sqlscripts/'.format(protocol,
hostname,
port)
def _update_private_upload_url(self, hostname, port, protocol):
self._cloud['uploadURL'] = '{0}://{1}:{2}/webconsole/'.format(protocol, hostname, port)
def _update_chargeback_flags(self, daily, weekly, monthly):
flags = 0
if daily:
flags = flags | 4
if weekly:
flags = flags | 8
if monthly:
flags = flags | 16
for service in self._service_list:
if service['service']['name'] == 'Charge Back':
service['flags'] = flags
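    # Flag layout used in _update_chargeback_flags: daily = 4 (0b100),
    # weekly = 8 (0b1000), monthly = 16 (0b10000); e.g. daily + monthly
    # gives 4 | 16 == 20.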
@property
def downloadurl(self):
"""Returns download URL of private metrics"""
return self._metrics_config['config']['cloud']['downloadURL']
@property
def uploadurl(self):
"""Returns Upload URL of private metrics"""
return self._metrics_config['config']['cloud']['uploadURL']
@property
def private_metrics_server_name(self):
return urlparse(self.uploadurl).hostname
def update_url(self, hostname, port=80, protocol='http'):
"""
updates private Metrics URL in CommServe
Args:
hostname (str): Metrics server hostname
port (int): port of webconsole
e.g.; 80 for http and 443 for https
protocol (str): http or https
default: http
"""
self._update_private_download_url(hostname, port, protocol)
self._update_private_upload_url(hostname, port, protocol)
def enable_chargeback(self, daily=True, weekly=False, monthly=False):
"""
        Enables Chargeback service as per the daily, weekly and monthly arguments passed
Args:
daily (bool): enables daily chargeback
weekly (bool): enables weekly chargeback
monthly(bool): enables Monthly chargeback
"""
if self.services['Charge Back'] is not True:
self._update_service_state('Charge Back', self._enable_service)
self._update_chargeback_flags(daily, weekly, monthly)
def enable_forwarding(self, forwarding_url):
"""
Enables forwarding
Args:
forwarding_url: Webconsole url where metrics data to be forwarded
"""
fwd_info = [{
"httpServerURL": forwarding_url,
"isPublic": False,
"urlPwd": "",
"urlUser": ""
}]
self._metrics_config['config']['tieringActive'] = True
self._metrics_config['config']['HttpServerInfo']["httpServer"] = fwd_info
def disable_forwarding(self):
"""Disables forwarding"""
self._metrics_config['config']['tieringActive'] = False
class CloudMetrics(_Metrics):
"""Class for operations in Cloud Metrics reporting"""
def __init__(self, commcell_object):
"""Initialize object of the UserGroups class.
Args:
commcell_object (object) -- instance of the Commcell class
Returns:
object - instance of the UserGroups class
"""
_Metrics.__init__(self, commcell_object, isprivate=False)
@property
def randomization_minutes(self):
return self._metrics_config['config']['randomization']
def enable_chargeback(self):
"""Enables Chargeback service"""
if self.services['Charge Back'] is not True:
self._update_service_state('Charge Back', self._enable_service)
def enable_upgrade_readiness(self):
"""Enables pre upgrade readiness service"""
if self.services['Upgrade Readiness'] is not True:
self._update_service_state('Upgrade Readiness', self._enable_service)
def disable_upgrade_readiness(self):
"""disables pre upgrade readiness service"""
if self.services['Upgrade Readiness'] is True:
self._update_service_state('Upgrade Readiness', self._disable_service)
def enable_proactive_support(self):
"""Enables Proactive Support service"""
if self.services['Proactive Support'] is not True:
self._update_service_state('Proactive Support', self._enable_service)
def disable_proactive_support(self):
"""disables Proactive Support service"""
if self.services['Proactive Support'] is True:
self._update_service_state('Proactive Support', self._disable_service)
def enable_cloud_assist(self):
"""Enables Cloud Assist service and proactive support if not already enabled"""
if self.services['Proactive Support'] is not True:
# pro active support must be enabled to enable cloud assist
self.enable_proactive_support()
self._update_service_state('Cloud Assist', self._enable_service)
def disable_cloud_assist(self):
"""disables Cloud Assist service"""
if self.services['Cloud Assist'] is True:
self._update_service_state('Cloud Assist', self._disable_service)
def set_randomization_minutes(self, minutes=0):
"""
        Sets the randomization value in the global param table
Args:
minutes (int): randomization value in minutes
"""
qcommand = self._commcell_object._services['QCOMMAND']
qoperation = ('qoperation execscript -sn SetKeyIntoGlobalParamTbl.sql '
'-si CommservSurveyRandomizationEnabled -si y -si {0}'.format(minutes))
flag, response = self._commcell_object._cvpysdk_object.make_request(
'POST', qcommand, qoperation
)
if not flag:
raise SDKException('Response', '101', response.text)
avg_line_length: 38.362637 | max_line_length: 99 | alphanum_fraction: 0.623767

hexsha: 29fe24f12e62d17c7c04a8ecc76191b16be3481b | size: 4,426 | ext: py | lang: Python
stars: reservoirpy/welllogspy/tracks/oilshowtrack.py @ scuervo91/reservoirpy (head a4db620baf3ff66a85c7f61b1919713a8642e6fc, ["MIT"]) | count: 16 | events: 2020-05-07T01:57:04.000Z .. 2021-11-27T12:45:59.000Z
issues: reservoirpy/welllogspy/tracks/oilshowtrack.py @ scuervo91/reservoirpy (head a4db620baf3ff66a85c7f61b1919713a8642e6fc, ["MIT"]) | count: null | events: null .. null
forks: reservoirpy/welllogspy/tracks/oilshowtrack.py @ scuervo91/reservoirpy (head a4db620baf3ff66a85c7f61b1919713a8642e6fc, ["MIT"]) | count: 5 | events: 2020-05-12T07:28:24.000Z .. 2021-12-10T21:24:59.000Z
import matplotlib.pyplot as plt
import matplotlib as mpl
import pandas as pd
import numpy as np
def oilshowtrack(df: pd.DataFrame,
                 oilshow: str = None,
                 lims: list = None,
                 dtick: bool = False,
                 fill: bool = True,
                 ax=None,
                 fontsize=8,
                 correlation: pd.DataFrame = None,
                 grid_numbers: list = [11, 51],
                 steps: list = None,
                 oilshow_colormap: str = 'summer',
                 corr_kw=None,
                 show_kw=None,
                 fill_kw=None,
                 depth_ref: str = 'md'):
"""oilshowtrack [summary]
Parameters
----------
df : pd.DataFrame
[description]
oilshow : str, optional
[description], by default None
lims : list, optional
[description], by default None
dtick : bool, optional
[description], by default False
fill : bool, optional
[description], by default True
ax : [type], optional
[description], by default None
fontsize : int, optional
[description], by default 8
correlation : pd.DataFrame, optional
[description], by default None
grid_numbers : list, optional
[description], by default [11,51]
steps : list, optional
[description], by default None
oilshow_colormap : str, optional
[description], by default 'summer'
corr_kw : dict, optional
[description], by default {}
show_kw : dict, optional
[description], by default {}
fill_kw : dict, optional
[description], by default {}
depth_ref : str, optional
[description], by default 'md'
"""
    oax = ax or plt.gca()
    # Copy user kwargs so shared/mutable defaults are never modified in place
    show_kw = dict(show_kw or {})
    corr_kw = dict(corr_kw or {})
    fill_kw = dict(fill_kw or {})
    defkwa = {
        'color': 'black',
        'linestyle': '-',
        'linewidth': 1
    }
    for (k, v) in defkwa.items():
        show_kw.setdefault(k, v)
    def_corr_kw = {
        'color': 'red',
        'linestyle': '--',
        'linewidth': 2
    }
    for (k, v) in def_corr_kw.items():
        corr_kw.setdefault(k, v)
    def_fill_kw = {
        'color': 'darkgreen',
    }
    for (k, v) in def_fill_kw.items():
        fill_kw.setdefault(k, v)
depth = df.index if depth_ref=='md' else df[depth_ref]
if oilshow is not None:
if isinstance(oilshow,str):
oax.plot(df[oilshow],depth,**show_kw) #Plotting
elif isinstance(oilshow,list):
cmap = mpl.cm.get_cmap(oilshow_colormap,len(oilshow))
for i,g in enumerate(oilshow):
show_kw['color']=cmap(i)
oax.plot(df[g],depth,**show_kw)
    if lims is None:  # Depth limits
        lims = [depth.min(), depth.max()]
    oax.set_ylim([lims[1], lims[0]])
#Set the vertical grid spacing
    if steps is None:
        major_grid = np.linspace(lims[0], lims[1], grid_numbers[0])
        minor_grid = np.linspace(lims[0], lims[1], grid_numbers[1])
    else:
        major_grid = np.arange(lims[0], lims[1], steps[0])
        minor_grid = np.arange(lims[0], lims[1], steps[1])
oax.set_xlim([0,1])
oax.set_xlabel("OilShow")
oax.set_xticks(np.linspace(0,1,4))
oax.set_xticklabels(np.round(np.linspace(0,1,4),decimals=2))
oax.xaxis.tick_top()
oax.xaxis.set_label_position("top")
oax.tick_params("both",labelsize=fontsize)
    oax.set_yticks(major_grid)
    oax.set_yticks(minor_grid, minor=True)
    if dtick:
        oax.set_yticklabels(major_grid, fontsize=fontsize)
    else:
        oax.set_yticklabels([])
    if fill and isinstance(oilshow, str):
        oax.fill_betweenx(depth, 0, df[oilshow], **fill_kw)
    # Add correlation lines
    if correlation is not None:
        cor_ann = corr_kw.pop('ann', False)
        for _, row in correlation.iterrows():
            oax.hlines(row['depth'], 0, 1, **corr_kw)
            if cor_ann:
                try:
                    oax.annotate(f"{row['depth']} - {row['comment']} ", xy=(1 - 0.3, row['depth'] - 1),
                                 xycoords='data', horizontalalignment='right',
                                 bbox={'boxstyle': 'roundtooth', 'fc': '0.8'})
                except KeyError:  # no 'comment' column supplied
                    oax.annotate(f"{row['depth']}", xy=(1 - 0.3, row['depth'] - 1),
                                 xycoords='data', horizontalalignment='right',
                                 bbox={'boxstyle': 'roundtooth', 'fc': '0.8'})
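# A minimal usage sketch (assumed data: a well-log DataFrame indexed by
# measured depth, with a normalized show curve in [0, 1]):
#
#     import numpy as np
#     import pandas as pd
#     import matplotlib.pyplot as plt
#
#     depth = np.arange(5000.0, 5100.0, 0.5)
#     logs = pd.DataFrame({'oilshow': np.random.rand(depth.size)}, index=depth)
#     fig, ax = plt.subplots(figsize=(2, 8))
#     oilshowtrack(logs, oilshow='oilshow', ax=ax, fill=True)
#     plt.show()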
avg_line_length: 32.072464 | max_line_length: 120 | alphanum_fraction: 0.542024