hexsha
stringlengths 40
40
| size
int64 3
1.03M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
972
| max_stars_repo_name
stringlengths 6
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
972
| max_issues_repo_name
stringlengths 6
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
116k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
972
| max_forks_repo_name
stringlengths 6
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 3
1.03M
| avg_line_length
float64 1.13
941k
| max_line_length
int64 2
941k
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ad8d447b964ebc89e8a57c459e754b4bbbeeb7e0
| 7,118
|
py
|
Python
|
obfsproxy/transports/scramblesuit/message.py
|
Samdney/obfsproxy
|
2bf9d096bb45a4e6c69f1cbdc3d2565f54a44efc
|
[
"BSD-3-Clause"
] | 101
|
2015-01-24T07:37:03.000Z
|
2022-01-22T15:38:44.000Z
|
obfsproxy/transports/scramblesuit/message.py
|
david415/obfsproxy
|
ea0e1b2b62be9113155f25f53baf5fce4392c430
|
[
"BSD-3-Clause"
] | 1
|
2015-03-29T15:16:04.000Z
|
2015-04-09T03:56:24.000Z
|
obfsproxy/transports/scramblesuit/message.py
|
david415/obfsproxy
|
ea0e1b2b62be9113155f25f53baf5fce4392c430
|
[
"BSD-3-Clause"
] | 29
|
2015-05-11T09:45:43.000Z
|
2020-02-22T17:50:27.000Z
|
"""
This module provides code to handle ScrambleSuit protocol messages.
The exported classes and functions provide interfaces to handle protocol
messages, check message headers for validity and create protocol messages out
of application data.
"""
import obfsproxy.common.log as logging
import obfsproxy.common.serialize as pack
import obfsproxy.transports.base as base
import mycrypto
import const
log = logging.get_obfslogger()
def createProtocolMessages( data, flags=const.FLAG_PAYLOAD ):
    """
    Create protocol messages out of the given payload.

    The given `data' is split into MPU-sized chunks, each of which is
    wrapped in a ProtocolMessage carrying the given `flags'.  Whatever
    remains after the full-size chunks (possibly an empty string) always
    becomes one final, shorter message, so at least one message is
    returned.
    """

    msgs = []

    # Emit full-MPU messages for as long as the remaining data overflows
    # a single message.
    while len(data) > const.MPU:
        msgs.append(ProtocolMessage(data[:const.MPU], flags=flags))
        data = data[const.MPU:]

    # The (possibly empty) remainder forms the trailing message.
    msgs.append(ProtocolMessage(data, flags=flags))

    log.debug("Created %d protocol messages." % len(msgs))

    return msgs
def getFlagNames( flags ):
    """
    Return the flag name encoded in the integer `flags' as string.

    This function is only useful for printing easy-to-read flag names in
    debug log messages.  Unknown values map to "Undefined".
    """

    # Known flag values and their human-readable names.
    flagNames = {
        1: "PAYLOAD",
        2: "NEW_TICKET",
        4: "PRNG_SEED",
    }

    return flagNames.get(flags, "Undefined")
def isSane( totalLen, payloadLen, flags ):
    """
    Verifies whether the given header fields are sane.

    The values of the fields `totalLen', `payloadLen' and `flags' are checked
    for their sanity.  If they are in the expected range, `True' is returned.
    If any of these fields has an invalid value, `False' is returned.
    """

    def isFine( length ):
        """
        Check if the given length is within [0, MPU].
        """
        # Idiomatic boolean expression instead of `True if ... else False'.
        return 0 <= length <= const.MPU

    log.debug("Message header: totalLen=%d, payloadLen=%d, flags"
              "=%s" % (totalLen, payloadLen, getFlagNames(flags)))

    validFlags = [
        const.FLAG_PAYLOAD,
        const.FLAG_NEW_TICKET,
        const.FLAG_PRNG_SEED,
    ]

    # Both lengths must fit into a message, the payload cannot exceed the
    # total length, and the flags must be one of the known values.
    return isFine(totalLen) and \
           isFine(payloadLen) and \
           totalLen >= payloadLen and \
           (flags in validFlags)
class ProtocolMessage( object ):
    """
    Represents a ScrambleSuit protocol message.

    Instances hold application payload together with an amount of padding
    and expose methods to grow the padding as well as to encrypt and
    authenticate the message for transmission.
    """

    def __init__( self, payload="", paddingLen=0, flags=const.FLAG_PAYLOAD ):
        """
        Initialises a ProtocolMessage object.
        """
        if (len(payload) + paddingLen) > const.MPU:
            raise base.PluggableTransportError("No overly long messages.")

        self.payload = payload
        self.payloadLen = len(payload)
        # totalLen covers payload plus padding (header excluded).
        self.totalLen = self.payloadLen + paddingLen
        self.flags = flags

    def encryptAndHMAC( self, crypter, hmacKey ):
        """
        Encrypt and authenticate this protocol message.

        The header fields, payload and zero-padding are encrypted using
        `crypter'; a HMAC-SHA256-128 keyed with `hmacKey' over the
        ciphertext is prepended.  The wire-ready string is returned.
        """
        header = (pack.htons(self.totalLen) +
                  pack.htons(self.payloadLen) +
                  chr(self.flags))
        padding = (self.totalLen - self.payloadLen) * '\0'

        encrypted = crypter.encrypt(header + self.payload + padding)

        return mycrypto.HMAC_SHA256_128(hmacKey, encrypted) + encrypted

    def addPadding( self, paddingLen ):
        """
        Add `paddingLen' bytes of padding to this protocol message.
        """
        # The padding must not exceed the message size.
        if (self.totalLen + paddingLen) > const.MPU:
            raise base.PluggableTransportError("Can't pad more than the MTU.")

        if paddingLen == 0:
            return

        log.debug("Adding %d bytes of padding to %d-byte message." %
                  (paddingLen, const.HDR_LENGTH + self.totalLen))

        self.totalLen += paddingLen

    def __len__( self ):
        """
        Return the length of this protocol message including its header.
        """
        return const.HDR_LENGTH + self.totalLen

# Alias class name in order to provide a more intuitive API.
new = ProtocolMessage
class MessageExtractor( object ):
    """
    Extracts ScrambleSuit protocol messages out of an encrypted stream.

    Incoming wire data is accumulated in an internal buffer; complete
    messages are decrypted, HMAC-verified and returned as ProtocolMessage
    objects.  Header fields of a partially received message are cached
    between calls to extract().
    """

    def __init__( self ):
        """
        Initialise a new MessageExtractor object.
        """
        # Accumulates raw wire bytes until a full message is available.
        self.recvBuf = ""
        # Header fields of the in-flight message; `None` means the header
        # has not been decrypted yet.
        self.totalLen = None
        self.payloadLen = None
        self.flags = None

    def extract( self, data, aes, hmacKey ):
        """
        Extracts (i.e., decrypts and authenticates) protocol messages.
        The raw `data' coming directly from the wire is decrypted using `aes'
        and authenticated using `hmacKey'.  The payload is then returned as
        unencrypted protocol messages.  In case of invalid headers or HMACs, an
        exception is raised.
        """
        self.recvBuf += data
        msgs = []

        # Keep trying to unpack as long as there is at least a header.
        while len(self.recvBuf) >= const.HDR_LENGTH:

            # If necessary, extract the header fields.
            if self.totalLen == self.payloadLen == self.flags == None:
                # NOTE(review): bytes [0:16] hold the HMAC-SHA256-128 and the
                # encrypted header fields follow — this assumes
                # const.HDR_LENGTH == 21 (16 + 2 + 2 + 1) and
                # const.HMAC_SHA256_128_LENGTH == 16; confirm against const.py.
                self.totalLen = pack.ntohs(aes.decrypt(self.recvBuf[16:18]))
                self.payloadLen = pack.ntohs(aes.decrypt(self.recvBuf[18:20]))
                self.flags = ord(aes.decrypt(self.recvBuf[20]))

                # Reject nonsensical lengths/flags before buffering further.
                if not isSane(self.totalLen, self.payloadLen, self.flags):
                    raise base.PluggableTransportError("Invalid header.")

            # Parts of the message are still on the wire; waiting.
            if (len(self.recvBuf) - const.HDR_LENGTH) < self.totalLen:
                break

            # Verify the HMAC over the encrypted header+body before use.
            rcvdHMAC = self.recvBuf[0:const.HMAC_SHA256_128_LENGTH]
            vrfyHMAC = mycrypto.HMAC_SHA256_128(hmacKey,
                self.recvBuf[const.HMAC_SHA256_128_LENGTH:
                    (self.totalLen + const.HDR_LENGTH)])

            if rcvdHMAC != vrfyHMAC:
                raise base.PluggableTransportError("Invalid message HMAC.")

            # Decrypt the message and remove it from the input buffer.
            # Padding (totalLen - payloadLen trailing bytes) is discarded
            # by the [:self.payloadLen] slice.
            extracted = aes.decrypt(self.recvBuf[const.HDR_LENGTH:
                (self.totalLen + const.HDR_LENGTH)])[:self.payloadLen]

            msgs.append(ProtocolMessage(payload=extracted, flags=self.flags))

            self.recvBuf = self.recvBuf[const.HDR_LENGTH + self.totalLen:]

            # Protocol message processed; now reset length fields.
            self.totalLen = self.payloadLen = self.flags = None

        return msgs
| 31.356828
| 79
| 0.622647
|
7fd138b63e60cf1167f2e9ac1477ffbb3b9ad844
| 1,837
|
py
|
Python
|
leetcode/278.first_bad_version/278.FirstBadVersion_JohnJim0816.py
|
henrytien/AlgorithmSolutions
|
62339269f4fa698ddd2e73458caef875af05af8f
|
[
"MIT"
] | 15
|
2020-06-27T03:28:39.000Z
|
2021-08-13T10:42:24.000Z
|
leetcode/278.first_bad_version/278.FirstBadVersion_JohnJim0816.py
|
henrytien/AlgorithmSolutions
|
62339269f4fa698ddd2e73458caef875af05af8f
|
[
"MIT"
] | 40
|
2020-06-27T03:29:53.000Z
|
2020-11-05T12:29:49.000Z
|
leetcode/278.first_bad_version/278.FirstBadVersion_JohnJim0816.py
|
henrytien/AlgorithmSolutions
|
62339269f4fa698ddd2e73458caef875af05af8f
|
[
"MIT"
] | 22
|
2020-07-16T03:23:43.000Z
|
2022-02-19T16:00:55.000Z
|
#!/usr/bin/env python
# coding=utf-8
'''
@Author: John
@Email: johnjim0816@gmail.com
@Date: 2020-07-21 09:46:44
@LastEditor: John
@LastEditTime: 2020-07-21 09:47:20
@Description:
@Environment: python 3.7.7
'''
# Source : https://leetcode.com/problems/first-bad-version/
# Author : JohnJim0816
# Date : 2020-07-21
#####################################################################################################
#
# You are a product manager and currently leading a team to develop a new product. Unfortunately, the
# latest version of your product fails the quality check. Since each version is developed based on
# the previous version, all the versions after a bad version are also bad.
#
# Suppose you have n versions [1, 2, ..., n] and you want to find out the first bad one, which causes
# all the following ones to be bad.
#
# You are given an API bool isBadVersion(version) which will return whether version is bad. Implement
# a function to find the first bad version. You should minimize the number of calls to the API.
#
# Example:
#
# Given n = 5, and version = 4 is the first bad version.
#
# call isBadVersion(3) -> false
# call isBadVersion(5) -> true
# call isBadVersion(4) -> true
#
# Then 4 is the first bad version.
#
#####################################################################################################
# The isBadVersion API is already defined for you.
# @param version, an integer
# @return a bool
# def isBadVersion(version):
class Solution:
    def firstBadVersion(self, n):
        """
        Locate the first bad version among versions [1, n] with binary
        search, minimising calls to the judge-provided isBadVersion API.

        :type n: int n>1
        :rtype: int
        """
        lo, hi = 1, n
        while lo < hi:
            # Overflow-safe midpoint (equivalent to (lo + hi) // 2 here).
            mid = lo + (hi - lo) // 2
            if isBadVersion(mid):
                # mid is bad, so the first bad version is at or before mid.
                hi = mid
            else:
                # mid is good, so the first bad version is strictly after.
                lo = mid + 1
        return lo
| 31.135593
| 102
| 0.577028
|
94401deab3733d20ff47eb24fa7cc99808291697
| 2,109
|
py
|
Python
|
demo_double_cross_validation_for_pls.py
|
hkaneko1985/dcek
|
13d9228b2dc2fd87c2e08a01721e1b1b220f2e19
|
[
"MIT"
] | 25
|
2019-08-23T12:39:14.000Z
|
2022-03-30T08:58:15.000Z
|
demo_double_cross_validation_for_pls.py
|
hkaneko1985/dcek
|
13d9228b2dc2fd87c2e08a01721e1b1b220f2e19
|
[
"MIT"
] | 2
|
2022-01-06T11:21:21.000Z
|
2022-01-18T22:11:12.000Z
|
demo_double_cross_validation_for_pls.py
|
hkaneko1985/dcek
|
13d9228b2dc2fd87c2e08a01721e1b1b220f2e19
|
[
"MIT"
] | 16
|
2019-12-12T08:20:48.000Z
|
2022-01-26T00:34:31.000Z
|
# -*- coding: utf-8 -*- %reset -f
"""
@author: Hiromasa Kaneko
"""
# Demonstration of Double Cross-Validation (DCV) for PLS

import matplotlib.figure as figure
import matplotlib.pyplot as plt
import numpy as np
from dcekit.validation import double_cross_validation
from sklearn import datasets
# import pandas as pd
from sklearn.cross_decomposition import PLSRegression
from sklearn.model_selection import GridSearchCV

# Settings
max_pls_component_number = 30  # candidate n_components: 1 .. 30
inner_fold_number = 5  # "fold_number"-fold cross-validation (CV) for inner CV
outer_fold_number = 10  # "fold_number"-fold CV for outer CV
number_of_training_samples = 1000
number_of_test_samples = 1000

# Generate samples for demonstration
x, y = datasets.make_regression(n_samples=number_of_training_samples + number_of_test_samples, n_features=100,
                                n_informative=100, noise=100, random_state=0)

# DCV: the inner GridSearchCV tunes n_components; the outer loop produces
# estimated y values for samples not used in that tuning.
pls_components = np.arange(1, max_pls_component_number + 1)
inner_cv = GridSearchCV(PLSRegression(), {'n_components': pls_components}, cv=inner_fold_number)
estimated_y = double_cross_validation(gs_cv=inner_cv, x=x, y=y, outer_fold_number=outer_fold_number,
                                      do_autoscaling=True, random_state=0)

# yy-plot: actual vs. estimated y with the identity line for reference,
# axes padded by 5% of the value range on each side.
plt.figure(figsize=figure.figaspect(1))
plt.scatter(y, estimated_y)
y_max = np.max(np.array([np.array(y), estimated_y]))
y_min = np.min(np.array([np.array(y), estimated_y]))
plt.plot([y_min - 0.05 * (y_max - y_min), y_max + 0.05 * (y_max - y_min)],
         [y_min - 0.05 * (y_max - y_min), y_max + 0.05 * (y_max - y_min)], 'k-')
plt.ylim(y_min - 0.05 * (y_max - y_min), y_max + 0.05 * (y_max - y_min))
plt.xlim(y_min - 0.05 * (y_max - y_min), y_max + 0.05 * (y_max - y_min))
plt.xlabel('Actual Y')
plt.ylabel('Estimated Y in CV')
plt.show()

# r2dcv, RMSEdcv, MAEdcv: cross-validated r^2, root-mean-square error and
# mean absolute error of the DCV estimates.
print('r2dcv: {0}'.format(float(1 - sum((y - estimated_y) ** 2) / sum((y - y.mean()) ** 2))))
print('RMSEdcv: {0}'.format(float((sum((y - estimated_y) ** 2) / len(y)) ** 0.5)))
print('MAEdcv: {0}'.format(float(sum(abs(y - estimated_y)) / len(y))))
| 42.18
| 111
| 0.683736
|
69490fd692a8298e0680ea33c78730521b0e84c3
| 19,173
|
py
|
Python
|
bokeh/tests/test_io.py
|
SiggyF/bokeh
|
52a2ce993b0f1102fd9e136f66036f52e91cdcc3
|
[
"BSD-3-Clause"
] | null | null | null |
bokeh/tests/test_io.py
|
SiggyF/bokeh
|
52a2ce993b0f1102fd9e136f66036f52e91cdcc3
|
[
"BSD-3-Clause"
] | null | null | null |
bokeh/tests/test_io.py
|
SiggyF/bokeh
|
52a2ce993b0f1102fd9e136f66036f52e91cdcc3
|
[
"BSD-3-Clause"
] | null | null | null |
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2015, Continuum Analytics, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
from __future__ import absolute_import
from mock import patch, Mock, PropertyMock
import unittest
import bokeh.io as io
from bokeh.resources import Resources, _SessionCoordinates
from bokeh.document import Document
from bokeh.models.plots import Plot
class TestDefaultState(unittest.TestCase):
    """The module-level default state object must be an io.State."""

    def test_type(self):
        default_state = io._state
        self.assertTrue(isinstance(default_state, io.State))
class testCurdoc(unittest.TestCase):
    """curdoc() must return the current state's document."""

    def test(self):
        current_doc = io.curdoc()
        self.assertEqual(current_doc, io._state.document)
class testCurstate(unittest.TestCase):
    """curstate() must return the module-level state object."""

    def test(self):
        current_state = io.curstate()
        self.assertEqual(current_state, io._state)
class DefaultStateTester(unittest.TestCase):
    """Base class that swaps io._state for a Mock around each test.

    setUp replaces the module-level io._state with a Mock whose
    document.roots property returns an empty list; tearDown restores the
    original state object.  _check_func_called asserts the exact
    positional and keyword arguments a mocked callable received.
    """

    def _check_func_called(self, func, args, kwargs):
        # `func` is a Mock; verify it was invoked with exactly args/kwargs.
        self.assertTrue(func.called)
        self.assertEqual(func.call_args[0], args)
        self.assertEqual(func.call_args[1], kwargs)

    def setUp(self):
        # Keep a reference so tearDown can restore the real state.
        self._orig_state = io._state
        io._state = Mock()
        # Give the mocked document a real empty list for `roots`, so tests
        # that inspect document roots compare against [] rather than a
        # Mock attribute.
        doc = Mock()
        roots = PropertyMock(return_value=[])
        type(doc).roots = roots
        io._state.document = doc

    def tearDown(self):
        io._state = self._orig_state
        # Clear any document content a test may have left behind.
        io._state.document.clear()
class testOutputFile(DefaultStateTester):
    """io.output_file must forward its arguments to State.output_file."""

    def test_noarg(self):
        # Only a filename: the documented defaults are forwarded.
        expected = dict(title="Bokeh Plot", autosave=False, mode="cdn", root_dir=None)
        io.output_file("foo.html")
        self._check_func_called(io._state.output_file, ("foo.html",), expected)

    def test_args(self):
        # Explicit keyword arguments are forwarded untouched.
        explicit = dict(title="title", autosave=True, mode="cdn", root_dir="foo")
        io.output_file("foo.html", **explicit)
        self._check_func_called(io._state.output_file, ("foo.html",), explicit)
class TestOutputNotebook(DefaultStateTester):
    """io.output_notebook must notify the state and load BokehJS."""

    @patch('bokeh.io.load_notebook')
    def test_noarg(self, mock_load_notebook):
        # No arguments: load_notebook receives (None, False, False).
        expected_load_args = (None, False, False)
        io.output_notebook()
        self._check_func_called(io._state.output_notebook, (), {})
        self._check_func_called(mock_load_notebook, expected_load_args, {})

    @patch('bokeh.io.load_notebook')
    def test_args(self, mock_load_notebook):
        # Explicit resources/verbose/hide_banner are passed straight through.
        explicit_args = (Resources(), True, True)
        io.output_notebook(*explicit_args)
        self._check_func_called(io._state.output_notebook, (), {})
        self._check_func_called(mock_load_notebook, explicit_args, {})
class TestOutputServer(DefaultStateTester):
    """io.output_server must forward its arguments to State.output_server."""

    def test_noarg(self):
        # No arguments: the documented defaults are forwarded.
        expected = dict(session_id="default", url="default", app_path='/', autopush=False)
        io.output_server()
        self._check_func_called(io._state.output_server, (), expected)

    def test_args(self):
        # Explicit keyword arguments are forwarded untouched.
        explicit = dict(session_id="foo", url="http://example.com", app_path='/foo', autopush=True)
        io.output_server(**explicit)
        self._check_func_called(io._state.output_server, (), explicit)
class TestSave(DefaultStateTester):
    # TODO(review): no direct tests for io.save yet; its argument
    # resolution is covered indirectly by Test_GetSaveArgs below.
    pass
class Test_GetSaveArgs(DefaultStateTester):
    """Tests for io._get_save_args fallback logic.

    Each of filename/resources/title is resolved in order from: the
    explicit argument, the state's `file` dict, or a warned-about default.
    """

    def test_explicit_filename(self):
        filename, resources, title = io._get_save_args(io._state, "filename", "resources", "title")
        self.assertEqual(filename, "filename")

    def test_default_filename(self):
        # No explicit filename: fall back to state.file['filename'].
        io._state.file = {}
        io._state.file['filename'] = "filename"
        filename, resources, title = io._get_save_args(io._state, None, "resources", "title")
        self.assertEqual(filename, "filename")

    def test_missing_filename(self):
        # No filename anywhere: save() must raise.
        io._state.file = None
        with self.assertRaises(RuntimeError):
            io.save("obj", None, "resources", "title")

    def test_explicit_resources(self):
        filename, resources, title = io._get_save_args(io._state, "filename", "resources", "title")
        self.assertEqual(resources, "resources")

    def test_default_resources(self):
        # No explicit resources: fall back to state.file['resources'].
        io._state.file = {}
        io._state.file['resources'] = "resources"
        filename, resources, title = io._get_save_args(io._state, "filename", None, "title")
        self.assertEqual(resources, "resources")

    @patch('warnings.warn')
    def test_missing_resources(self, mock_warn):
        # No resources anywhere: default to CDN and emit exactly one warning.
        from bokeh.resources import CDN
        io._state.file = None
        filename, resources, title = io._get_save_args(io._state, "filename", None, "title")
        self.assertEqual(resources, CDN)
        self.assertTrue(mock_warn.called)
        self.assertEqual(mock_warn.call_args[0], ("save() called but no resources were supplied and output_file(...) "
                                                  "was never called, defaulting to resources.CDN",))
        self.assertEqual(mock_warn.call_args[1], {})

    def test_explicit_title(self):
        filename, resources, title = io._get_save_args(io._state, "filename", "resources", "title")
        self.assertEqual(title, "title")

    def test_default_title(self):
        # No explicit title: fall back to state.file['title'].
        io._state.file = {}
        io._state.file['title'] = "title"
        filename, resources, title = io._get_save_args(io._state, "filename", "resources", None)
        self.assertEqual(title, "title")

    @patch('warnings.warn')
    def test_missing_title(self, mock_warn):
        # No title anywhere: default to "Bokeh Plot" and warn.
        io._state.file = None
        filename, resources, title = io._get_save_args(io._state, "filename", "resources", None)
        self.assertEqual(title, "Bokeh Plot")
        self.assertTrue(mock_warn.called)
        self.assertEqual(mock_warn.call_args[0], ("save() called but no title was supplied and output_file(...) "
                                                  "was never called, using default title 'Bokeh Plot'",))
        self.assertEqual(mock_warn.call_args[1], {})
class Test_SaveHelper(DefaultStateTester):
    # TODO(review): io._save_helper has no direct tests yet.
    pass
class TestPush(DefaultStateTester):
    """Tests that io.push forwards the right session coordinates.

    Each test patches bokeh.io._push_to_server and checks which
    url/app_path/session_id/document/io_loop values are forwarded,
    depending on which arguments the caller supplied explicitly.
    """

    @patch('bokeh.io._push_to_server')
    def test_missing_output_server(self, mock_push_to_server):
        # never calling output_server should pull session coords
        # off the io._state object
        io._state.server_enabled = False
        io._state.document = Document()
        io.push()
        self._check_func_called(mock_push_to_server, (),
                                dict(url=io._state.url,
                                     app_path=io._state.app_path,
                                     session_id=io._state.session_id_allowing_none,
                                     document=io._state.document,
                                     io_loop=None))

    @patch('bokeh.io._push_to_server')
    def test_noargs(self, mock_push_to_server):
        # if we had called output_server, the state object would be set
        # up like this
        io._state.session_id_allowing_none = "fakesessionid"
        io._state.url = "http://example.com/"
        io._state.app_path = "/bar"
        io._state.server_enabled = True
        io.push()
        self._check_func_called(mock_push_to_server, (),
                                dict(url="http://example.com/",
                                     document=io._state.document,
                                     session_id="fakesessionid",
                                     app_path="/bar",
                                     io_loop=None))

    @patch('bokeh.io._push_to_server')
    def test_session_arg(self, mock_push_to_server):
        # this simulates never calling output_server
        io._state.server_enabled = False
        io.push(session_id="somesession")
        self._check_func_called(mock_push_to_server, (),
                                dict(url=io._state.url,
                                     app_path=io._state.app_path,
                                     document=io._state.document,
                                     session_id="somesession",
                                     io_loop=None))

    @patch('bokeh.io._push_to_server')
    def test_url_arg(self, mock_push_to_server):
        # this simulates never calling output_server
        io._state.server_enabled = False
        io.push(url="http://example.com/")
        self._check_func_called(mock_push_to_server, (),
                                dict(url="http://example.com/",
                                     app_path=io._state.app_path,
                                     session_id=io._state.session_id_allowing_none,
                                     document=io._state.document,
                                     io_loop=None))

    @patch('bokeh.io._push_to_server')
    def test_document_arg(self, mock_push_to_server):
        # this simulates never calling output_server
        io._state.server_enabled = False
        d = Document()
        io.push(document=d)
        self._check_func_called(mock_push_to_server, (),
                                dict(url=io._state.url,
                                     app_path=io._state.app_path,
                                     session_id=io._state.session_id_allowing_none,
                                     document=d,
                                     io_loop=None))

    @patch('bokeh.io._push_to_server')
    def test_all_args(self, mock_push_to_server):
        d = Document()
        url = "https://example.com/"
        session_id = "all_args_session"
        app_path = "/foo"
        # state should get ignored since we specified everything otherwise
        state = Mock()
        io_loop = Mock()
        io.push(document=d, url=url, app_path=app_path, state=state, session_id=session_id, io_loop=io_loop)
        self._check_func_called(mock_push_to_server, (),
                                dict(url="https://example.com/",
                                     app_path="/foo",
                                     document=d,
                                     session_id="all_args_session",
                                     io_loop=io_loop))

    @patch('bokeh.io._push_to_server')
    def test_state_arg(self, mock_push_to_server):
        d = Document()
        url = "https://example.com/state/"
        session_id = "state_arg_session"
        # an explicitly passed state supplies the coordinates instead of
        # the module default
        state = Mock()
        state.document = d
        state.url = url
        state.session_id_allowing_none = session_id
        io.push(state=state)
        self._check_func_called(mock_push_to_server, (),
                                dict(url="https://example.com/state/",
                                     document=d,
                                     session_id="state_arg_session",
                                     app_path = state.app_path,
                                     io_loop=None))
class TestShow(DefaultStateTester):
    """io.show must forward its arguments to _show_with_state."""

    @patch('bokeh.io._show_with_state')
    def test_default_args(self, mock__show_with_state):
        # Defaults: no browser override, open in a new tab.
        io.show("obj", browser=None, new="tab")
        self._check_func_called(mock__show_with_state, ("obj", io._state, None, "tab"), {})

    @patch('bokeh.io._show_with_state')
    def test_explicit_args(self, mock__show_with_state):
        # Explicit browser/new values are passed straight through.
        io.show("obj", browser="browser", new="new")
        self._check_func_called(mock__show_with_state, ("obj", io._state, "browser", "new"), {})
@patch('bokeh.io._show_with_state')
def test_show_adds_obj_to_document_if_not_already_there(m):
    # Pytest-style module-level test: `m` is the patched _show_with_state
    # mock (unused here; patching only prevents real rendering).
    # NOTE(review): this uses the real module state, not the Mock fixture
    # from DefaultStateTester — confirm test isolation is intended.
    assert io._state.document.roots == []
    p = Plot()
    io.show(p)
    assert p in io._state.document.roots
@patch('bokeh.io._show_with_state')
def test_show_doesnt_duplicate_if_already_there(m):
    # Pytest-style module-level test: showing the same object twice must
    # not register it as a document root twice.  `m` is the patched
    # _show_with_state mock (unused; patching prevents real rendering).
    io._state.document.clear()
    p = Plot()
    io.show(p)
    assert io._state.document.roots == [p]
    io.show(p)
    assert io._state.document.roots == [p]
class Test_ShowWithState(DefaultStateTester):
    """Tests for io._show_with_state output-destination dispatch.

    Depending on the state's notebook/file/server configuration,
    _show_with_state must call the matching _show_*_with_state helper(s).
    """

    @patch('bokeh.io._show_notebook_with_state')
    @patch('bokeh.io._show_server_with_state')
    @patch('bokeh.io._show_file_with_state')
    @patch('bokeh.util.browser.get_browser_controller')
    def test_notebook(self, mock_get_browser_controller,
                      mock__show_file_with_state, mock__show_server_with_state,
                      mock__show_notebook_with_state):
        mock_get_browser_controller.return_value = "controller"
        s = io.State()
        # Notebook only: just the notebook helper fires.
        s.output_notebook()
        io._show_with_state("obj", s, "browser", "new")
        self._check_func_called(mock__show_notebook_with_state, ("obj", s), {})
        self.assertFalse(mock__show_server_with_state.called)
        self.assertFalse(mock__show_file_with_state.called)
        # Notebook + file: both helpers fire.
        s.output_file("foo.html")
        io._show_with_state("obj", s, "browser", "new")
        self._check_func_called(mock__show_notebook_with_state, ("obj", s), {})
        self.assertFalse(mock__show_server_with_state.called)
        self._check_func_called(mock__show_file_with_state, ("obj", s, "new", "controller"), {})
        s._session = Mock
        io._show_with_state("obj", s, "browser", "new")
        self._check_func_called(mock__show_notebook_with_state, ("obj", s), {})
        self.assertFalse(mock__show_server_with_state.called)
        self._check_func_called(mock__show_file_with_state, ("obj", s, "new", "controller"), {})

    @patch('bokeh.io.get_comms')
    @patch('bokeh.io._show_notebook_with_state')
    @patch('bokeh.io._show_server_with_state')
    @patch('bokeh.io._show_file_with_state')
    @patch('bokeh.util.browser.get_browser_controller')
    def test_no_notebook(self, mock_get_browser_controller,
                         mock__show_file_with_state, mock__show_server_with_state,
                         mock__show_notebook_with_state,
                         mock_get_comms):
        mock_get_browser_controller.return_value = "controller"
        mock_get_comms.return_value = "comms"
        s = io.State()
        # File only: only the file helper fires.
        s.output_file("foo.html")
        io._show_with_state("obj", s, "browser", "new")
        self.assertFalse(mock__show_notebook_with_state.called)
        self.assertFalse(mock__show_server_with_state.called)
        self._check_func_called(mock__show_file_with_state, ("obj", s, "new", "controller"), {})
        # File + server enabled: file and server helpers both fire.
        s._session_coords = _SessionCoordinates(dict(session_id="fakesession",
                                                     url="http://example.com",
                                                     app_path='/'))
        s._server_enabled = True
        io._show_with_state("obj", s, "browser", "new")
        self.assertFalse(mock__show_notebook_with_state.called)
        self._check_func_called(mock__show_server_with_state, ("obj", s, "new", "controller"), {})
        self._check_func_called(mock__show_file_with_state, ("obj", s, "new", "controller"), {})

    @patch('warnings.warn')
    @patch('bokeh.util.browser.get_browser_controller')
    def test_ShowNotebookWithState_bokehjs_load_failed(self, mock_get_browser_controller, mock_warn):
        # Without a successful load_notebook(), showing in a notebook must
        # warn that BokehJS did not load.
        mock_get_browser_controller.return_value = "controller"
        s = io.State()
        s.output_notebook()
        io._show_with_state("obj", s, "browser", "new")
        self.assertTrue(mock_warn.called)
        self.assertEqual(mock_warn.call_args[0], ("""
BokehJS does not appear to have successfully loaded. If loading BokehJS from CDN, this
may be due to a slow or bad network connection. Possible fixes:
* ALWAYS run `output_notebook()` in a cell BY ITSELF, AT THE TOP, with no other code
* re-rerun `output_notebook()` to attempt to load from CDN again, or
* use INLINE resources instead, as so:
    from bokeh.resources import INLINE
    output_notebook(resources=INLINE)
""",))
        self.assertEqual(mock_warn.call_args[1], {})
class Test_ShowFileWithState(DefaultStateTester):
    """_show_file_with_state must save the object and open the result."""

    @patch('os.path.abspath')
    @patch('bokeh.io.save')
    def test(self, mock_save, mock_abspath):
        s = io.State()
        s.output_file("foo.html")
        controller = Mock()
        mock_save.return_value = "savepath"
        # "window" maps to the controller's new=1, "tab" to new=2.
        io._show_file_with_state("obj", s, "window", controller)
        self._check_func_called(mock_save, ("obj",), {"state": s})
        self._check_func_called(controller.open, ("file://savepath",), {"new": 1})
        io._show_file_with_state("obj", s, "tab", controller)
        self._check_func_called(mock_save, ("obj",), {"state": s})
        self._check_func_called(controller.open, ("file://savepath",), {"new": 2})
class Test_ShowNotebookWithState(DefaultStateTester):
    """_show_notebook_with_state must publish the right HTML payload."""

    @patch('bokeh.io.publish_display_data')
    @patch('bokeh.io.autoload_server')
    @patch('bokeh.io.push')
    def test_with_server(self, mock_push, mock_autoload_server, mock_publish_display_data):
        # Server-backed state: push first, then publish the autoload snippet.
        s = io.State()
        s._server_enabled = True
        mock_autoload_server.return_value = "snippet"
        io._show_notebook_with_state("obj", s)
        self._check_func_called(mock_push, (), {"state": s})
        self._check_func_called(mock_publish_display_data, ({"text/html":"snippet"},), {})

    @patch('bokeh.io.get_comms')
    @patch('bokeh.io.publish_display_data')
    @patch('bokeh.io.notebook_div')
    def test_no_server(self, mock_notebook_div, mock_publish_display_data, mock_get_comms):
        # No server: publish the inline notebook div instead.
        mock_get_comms.return_value = "comms"
        s = io.State()
        mock_notebook_div.return_value = "notebook_div"
        # Temporarily mark BokehJS as loaded so no warning path triggers.
        io._nb_loaded = True
        io._show_notebook_with_state("obj", s)
        io._nb_loaded = False
        self._check_func_called(mock_publish_display_data, ({"text/html": "notebook_div"},), {})
class Test_ShowServerWithState(DefaultStateTester):
    """_show_server_with_state must push and open the session URL."""

    @patch('bokeh.io.push')
    def test(self, mock_push):
        s = io.State()
        s._session_coords = _SessionCoordinates(dict(session_id="thesession",
                                                     url="http://example.com",
                                                     app_path='/foo'))
        s._server_enabled = True
        controller = Mock()
        # "window" maps to the controller's new=1, "tab" to new=2.
        io._show_server_with_state("obj", s, "window", controller)
        self._check_func_called(mock_push, (), {"state": s})
        self._check_func_called(controller.open, ("http://example.com/foo?bokeh-session-id=thesession",), {"new": 1})
        io._show_server_with_state("obj", s, "tab", controller)
        self._check_func_called(mock_push, (), {"state": s})
        self._check_func_called(controller.open, ("http://example.com/foo?bokeh-session-id=thesession",), {"new": 2})
class TestResetOutput(DefaultStateTester):
    """io.reset_output must delegate to State.reset."""

    def test(self):
        io.reset_output()
        was_reset = io._state.reset.called
        self.assertTrue(was_reset)
def _test_layout_added_to_root(layout_generator, children=None):
    # Shared helper: a generated layout must be registered as a root of
    # the current document.  Cleans up curdoc afterwards.
    target = Plot() if children is None else children
    layout = layout_generator(target)
    assert layout in io.curdoc().roots
    io.curdoc().clear()
def _test_children_removed_from_root(layout_generator, children=None):
    # Shared helper: once wrapped into a layout, a component must no
    # longer be a document root itself.  Cleans up curdoc afterwards.
    component = Plot()
    initial_root = component if children is None else children[0][0]
    io.curdoc().add_root(initial_root)
    layout_generator(component if children is None else children)
    assert component not in io.curdoc().roots
    io.curdoc().clear()
| 41.954048
| 118
| 0.628592
|
4a46ce0cca70ed71db7a37baa6688e585475545d
| 16,117
|
py
|
Python
|
tensorflow/python/keras/layers/preprocessing/table_utils_test.py
|
ashutom/tensorflow-upstream
|
c16069c19de9e286dd664abb78d0ea421e9f32d4
|
[
"Apache-2.0"
] | 190,993
|
2015-11-09T13:17:30.000Z
|
2022-03-31T23:05:27.000Z
|
tensorflow/python/keras/layers/preprocessing/table_utils_test.py
|
CaptainGizzy21/tensorflow
|
3457a2b122e50b4d44ceaaed5a663d635e5c22df
|
[
"Apache-2.0"
] | 48,461
|
2015-11-09T14:21:11.000Z
|
2022-03-31T23:17:33.000Z
|
tensorflow/python/keras/layers/preprocessing/table_utils_test.py
|
CaptainGizzy21/tensorflow
|
3457a2b122e50b4d44ceaaed5a663d635e5c22df
|
[
"Apache-2.0"
] | 104,981
|
2015-11-09T13:40:17.000Z
|
2022-03-31T19:51:54.000Z
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Keras lookup table utils."""
import os
import tempfile
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras.layers.preprocessing import preprocessing_test_utils
from tensorflow.python.keras.layers.preprocessing import table_utils
from tensorflow.python.ops import lookup_ops
from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.platform import gfile
from tensorflow.python.platform import test
def get_table(dtype=dtypes.string, oov_tokens=None):
  """Builds a TableHandler backed by a mutable hash table.

  Keys use `dtype`, values are int64, and lookups of unknown keys return
  the default value -7 unless `oov_tokens` are configured.
  """
  backing_table = lookup_ops.MutableHashTable(
      key_dtype=dtype,
      value_dtype=dtypes.int64,
      default_value=-7,
      name="index_table")
  return table_utils.TableHandler(backing_table, oov_tokens)
def get_static_table(tmpdir,
                     vocab_list,
                     mask_token=None,
                     dtype=dtypes.string,
                     oov_tokens=None):
  """Builds a TableHandler backed by a static, file-initialized table.

  `vocab_list` is written one entry per line to a temp vocabulary file;
  indices are offset past the optional mask token and OOV tokens.
  Unknown keys map to the default value -7.
  """
  vocab_path = os.path.join(tmpdir, "tmp_vocab.txt")

  # Serialize the vocabulary, stringifying non-string dtypes.
  if dtype == dtypes.string:
    lines = list(vocab_list)
  else:
    lines = [str(v) for v in vocab_list]
  with open(vocab_path, "w") as vocab_file:
    vocab_file.write("\n".join(lines) + "\n")

  # Reserve index slots for the mask token and any OOV tokens.
  num_special_tokens = 0 if mask_token is None else 1
  if oov_tokens is not None:
    num_special_tokens += len(oov_tokens)

  initializer = lookup_ops.TextFileInitializer(
      vocab_path,
      dtype,
      lookup_ops.TextFileIndex.WHOLE_LINE,
      dtypes.int64,
      lookup_ops.TextFileIndex.LINE_NUMBER,
      value_index_offset=num_special_tokens)
  static_table = lookup_ops.StaticHashTable(initializer, default_value=-7)
  return table_utils.TableHandler(
      static_table,
      oov_tokens,
      mask_token=mask_token)
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
class CategoricalEncodingInputTest(
    keras_parameterized.TestCase,
    preprocessing_test_utils.PreprocessingLayerTest):
  """TableHandler lookups over sparse, ragged and dense inputs (one OOV bucket)."""

  def test_sparse_string_input(self):
    """String SparseTensor: vocab hits map to indices, misses to the OOV token."""
    vocab_data = ["earth", "wind", "and", "fire"]
    input_array = sparse_tensor.SparseTensor(
        indices=[[0, 0], [1, 2]],
        values=["fire", "michigan"],
        dense_shape=[3, 4])
    # Vocab is inserted starting at index 2 (after the single OOV token at 1),
    # so "fire" -> 5 and the out-of-vocab "michigan" -> 1.
    expected_indices = [[0, 0], [1, 2]]
    expected_values = [5, 1]
    expected_dense_shape = [3, 4]

    table = get_table(oov_tokens=[1])
    table.insert(vocab_data, range(2, len(vocab_data) + 2))
    output_data = table.lookup(input_array)

    self.assertAllEqual(expected_indices, output_data.indices)
    self.assertAllEqual(expected_values, output_data.values)
    self.assertAllEqual(expected_dense_shape, output_data.dense_shape)

  def test_sparse_int_input(self):
    """Int64 SparseTensor lookup mirrors the string path."""
    vocab_data = np.array([10, 11, 12, 13], dtype=np.int64)
    input_array = sparse_tensor.SparseTensor(
        indices=[[0, 0], [1, 2]],
        values=np.array([13, 32], dtype=np.int64),
        dense_shape=[3, 4])
    expected_indices = [[0, 0], [1, 2]]
    expected_values = [5, 1]
    expected_dense_shape = [3, 4]

    table = get_table(dtype=dtypes.int64, oov_tokens=[1])
    table.insert(vocab_data, range(2, len(vocab_data) + 2))
    output_data = table.lookup(input_array)

    self.assertAllEqual(expected_indices, output_data.indices)
    self.assertAllEqual(expected_values, output_data.values)
    self.assertAllEqual(expected_dense_shape, output_data.dense_shape)

  def test_ragged_string_input(self):
    """String RaggedTensor lookup preserves the ragged row structure."""
    vocab_data = ["earth", "wind", "and", "fire"]
    input_array = ragged_factory_ops.constant(
        [["earth", "wind", "fire"], ["fire", "and", "earth", "michigan"]])
    expected_output = [[2, 3, 5], [5, 4, 2, 1]]

    table = get_table(oov_tokens=[1])
    table.insert(vocab_data, range(2, len(vocab_data) + 2))
    output_data = table.lookup(input_array)

    self.assertAllEqual(expected_output, output_data)

  def test_ragged_int_input(self):
    """Int64 RaggedTensor lookup preserves the ragged row structure."""
    vocab_data = np.array([10, 11, 12, 13], dtype=np.int64)
    input_array = ragged_factory_ops.constant([[10, 11, 13], [13, 12, 10, 42]],
                                              dtype=np.int64)
    expected_output = [[2, 3, 5], [5, 4, 2, 1]]

    table = get_table(dtype=dtypes.int64, oov_tokens=[1])
    table.insert(vocab_data, range(2, len(vocab_data) + 2))
    output_data = table.lookup(input_array)

    self.assertAllEqual(expected_output, output_data)

  def test_tensor_multi_dim_values_fails(self):
    """insert() rejects value arrays that are not rank-1."""
    key_data = np.array([0, 1], dtype=np.int64)
    value_data = np.array([[11, 12], [21, 22]])

    table = get_table(dtype=dtypes.int64, oov_tokens=[1, 2])

    with self.assertRaisesRegex(ValueError, "must be 1-dimensional"):
      table.insert(key_data, value_data)
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
class CategoricalEncodingMultiOOVTest(
    keras_parameterized.TestCase,
    preprocessing_test_utils.PreprocessingLayerTest):
  """TableHandler lookups with two OOV buckets (vocab indices start at 3)."""

  def test_sparse_string_input_multi_bucket(self):
    """OOV strings land in one of the two OOV buckets ("ohio" -> 2)."""
    vocab_data = ["earth", "wind", "and", "fire"]
    input_array = sparse_tensor.SparseTensor(
        indices=[[0, 0], [1, 2]], values=["fire", "ohio"], dense_shape=[3, 4])
    expected_indices = [[0, 0], [1, 2]]
    expected_values = [6, 2]
    expected_dense_shape = [3, 4]

    table = get_table(oov_tokens=[1, 2])
    table.insert(vocab_data, range(3, len(vocab_data) + 3))
    output_data = table.lookup(input_array)

    self.assertAllEqual(expected_indices, output_data.indices)
    self.assertAllEqual(expected_values, output_data.values)
    self.assertAllEqual(expected_dense_shape, output_data.dense_shape)

  def test_sparse_int_input_multi_bucket(self):
    """OOV int keys land in one of the two OOV buckets (132 -> 1)."""
    vocab_data = np.array([10, 11, 12, 13], dtype=np.int64)
    input_array = sparse_tensor.SparseTensor(
        indices=[[0, 0], [1, 2]],
        values=np.array([13, 132], dtype=np.int64),
        dense_shape=[3, 4])
    expected_indices = [[0, 0], [1, 2]]
    expected_values = [6, 1]
    expected_dense_shape = [3, 4]

    table = get_table(dtype=dtypes.int64, oov_tokens=[1, 2])
    table.insert(vocab_data, range(3, len(vocab_data) + 3))
    output_data = table.lookup(input_array)

    self.assertAllEqual(expected_indices, output_data.indices)
    self.assertAllEqual(expected_values, output_data.values)
    self.assertAllEqual(expected_dense_shape, output_data.dense_shape)

  def test_ragged_string_input_multi_bucket(self):
    """Ragged string lookup with two OOV buckets."""
    vocab_data = ["earth", "wind", "and", "fire"]
    input_array = ragged_factory_ops.constant([["earth", "wind", "fire"],
                                               ["fire", "and", "earth",
                                                "ohio"]])
    expected_output = [[3, 4, 6], [6, 5, 3, 2]]

    table = get_table(oov_tokens=[1, 2])
    table.insert(vocab_data, range(3, len(vocab_data) + 3))
    output_data = table.lookup(input_array)

    self.assertAllEqual(expected_output, output_data)

  def test_ragged_int_input_multi_bucket(self):
    """Ragged int lookup with two OOV buckets."""
    vocab_data = np.array([10, 11, 12, 13], dtype=np.int64)
    input_array = ragged_factory_ops.constant([[10, 11, 13], [13, 12, 10, 132]],
                                              dtype=np.int64)
    expected_output = [[3, 4, 6], [6, 5, 3, 1]]

    table = get_table(dtype=dtypes.int64, oov_tokens=[1, 2])
    table.insert(vocab_data, range(3, len(vocab_data) + 3))
    output_data = table.lookup(input_array)

    self.assertAllEqual(expected_output, output_data)

  def test_tensor_int_input_multi_bucket(self):
    """Dense int input: distinct OOV keys may land in different buckets."""
    vocab_data = np.array([10, 11, 12, 13], dtype=np.int64)
    input_array = np.array([[13, 132], [13, 133]], dtype=np.int64)
    expected_values = [[6, 1], [6, 2]]

    table = get_table(dtype=dtypes.int64, oov_tokens=[1, 2])
    table.insert(vocab_data, range(3, len(vocab_data) + 3))
    output_data = table.lookup(input_array)

    self.assertAllEqual(expected_values, output_data)

  def test_tensor_string_input_multi_bucket(self):
    """Dense string input: distinct OOV strings may land in different buckets."""
    vocab_data = ["earth", "wind", "and", "fire"]
    input_array = [["earth", "wind", "fire", "michigan"],
                   ["fire", "and", "earth", "ohio"]]
    expected_output = [[3, 4, 6, 1], [6, 5, 3, 2]]

    table = get_table(oov_tokens=[1, 2])
    table.insert(vocab_data, range(3, len(vocab_data) + 3))
    output_data = table.lookup(input_array)

    self.assertAllEqual(expected_output, output_data)
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
class IndexLookupOutputTest(keras_parameterized.TestCase,
                            preprocessing_test_utils.PreprocessingLayerTest):
  """Lookups with no OOV buckets: misses get the table's default value (-7)."""

  def test_int_output_default_lookup_value(self):
    """With vocab indexed from 1, an OOV key returns the default -7."""
    vocab_data = ["earth", "wind", "and", "fire"]
    input_array = np.array([["earth", "wind", "and", "fire"],
                            ["fire", "and", "earth", "michigan"]])
    expected_output = [[1, 2, 3, 4], [4, 3, 1, -7]]

    table = get_table(oov_tokens=None)
    table.insert(vocab_data, range(1, len(vocab_data) + 1))
    output_data = table.lookup(input_array)

    self.assertAllEqual(expected_output, output_data)

  def test_output_shape(self):
    """Lookup preserves the trailing dimensions of the input."""
    vocab_data = ["earth", "wind", "and", "fire"]
    input_array = np.array([["earth", "wind", "and", "fire"],
                            ["fire", "and", "earth", "michigan"]])

    table = get_table()
    table.insert(vocab_data, range(1, len(vocab_data) + 1))
    output_data = table.lookup(input_array)

    self.assertAllEqual(input_array.shape[1:], output_data.shape[1:])

  def test_int_output_no_reserved_zero_default_lookup_value(self):
    """With vocab indexed from 0 (no reserved zero), misses still give -7."""
    vocab_data = ["earth", "wind", "and", "fire"]
    input_array = np.array([["earth", "wind", "and", "fire"],
                            ["fire", "and", "earth", "michigan"]])
    expected_output = [[0, 1, 2, 3], [3, 2, 0, -7]]

    table = get_table(oov_tokens=None)
    table.insert(vocab_data, range(len(vocab_data)))
    output_data = table.lookup(input_array)

    self.assertAllEqual(expected_output, output_data)
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
class StaticIndexLookupOutputTest(
    keras_parameterized.TestCase,
    preprocessing_test_utils.PreprocessingLayerTest):
  """Same no-OOV lookup behavior, but over a file-backed static table."""

  def test_int_output_default_lookup_value(self):
    """Mask token present: vocab starts at 1; OOV key returns default -7."""
    vocab_data = ["earth", "wind", "and", "fire"]
    input_array = np.array([["earth", "wind", "and", "fire"],
                            ["fire", "and", "earth", "michigan"]])
    expected_output = [[1, 2, 3, 4], [4, 3, 1, -7]]

    table = get_static_table(
        tmpdir=self.get_temp_dir(),
        vocab_list=vocab_data,
        mask_token="",
        oov_tokens=None)
    output_data = table.lookup(input_array)

    self.assertAllEqual(expected_output, output_data)

  def test_output_shape(self):
    """Lookup preserves the trailing dimensions of the input."""
    vocab_data = ["earth", "wind", "and", "fire"]
    input_array = np.array([["earth", "wind", "and", "fire"],
                            ["fire", "and", "earth", "michigan"]])

    table = get_static_table(
        tmpdir=self.get_temp_dir(), vocab_list=vocab_data, oov_tokens=None)
    output_data = table.lookup(input_array)

    self.assertAllEqual(input_array.shape[1:], output_data.shape[1:])

  def test_int_output_no_reserved_zero_default_lookup_value(self):
    """No mask token: vocab starts at 0; OOV key still returns -7."""
    vocab_data = ["earth", "wind", "and", "fire"]
    input_array = np.array([["earth", "wind", "and", "fire"],
                            ["fire", "and", "earth", "michigan"]])
    expected_output = [[0, 1, 2, 3], [3, 2, 0, -7]]

    table = get_static_table(
        tmpdir=self.get_temp_dir(), vocab_list=vocab_data, oov_tokens=None)
    output_data = table.lookup(input_array)

    self.assertAllEqual(expected_output, output_data)
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
class CategoricalEncodingStaticInputTest(
    keras_parameterized.TestCase,
    preprocessing_test_utils.PreprocessingLayerTest):
  """Sparse/ragged lookups against a file-backed static table with OOV."""

  def test_sparse_string_input(self):
    """String SparseTensor: hits index from 2, miss goes to OOV token 1."""
    vocab_data = ["earth", "wind", "and", "fire"]
    input_array = sparse_tensor.SparseTensor(
        indices=[[0, 0], [1, 2]],
        values=["fire", "michigan"],
        dense_shape=[3, 4])
    expected_indices = [[0, 0], [1, 2]]
    expected_values = [5, 1]
    expected_dense_shape = [3, 4]

    table = get_static_table(
        tmpdir=self.get_temp_dir(),
        vocab_list=vocab_data,
        mask_token="",
        oov_tokens=[1])
    output_data = table.lookup(input_array)

    self.assertAllEqual(expected_indices, output_data.indices)
    self.assertAllEqual(expected_values, output_data.values)
    self.assertAllEqual(expected_dense_shape, output_data.dense_shape)

  def test_sparse_int_input(self):
    """Int64 SparseTensor with mask token 0 and one OOV bucket."""
    vocab_data = np.array([10, 11, 12, 13], dtype=np.int64)
    input_array = sparse_tensor.SparseTensor(
        indices=[[0, 0], [1, 2]],
        values=np.array([13, 32], dtype=np.int64),
        dense_shape=[3, 4])
    expected_indices = [[0, 0], [1, 2]]
    expected_values = [5, 1]
    expected_dense_shape = [3, 4]

    table = get_static_table(
        tmpdir=self.get_temp_dir(),
        vocab_list=vocab_data,
        dtype=dtypes.int64,
        mask_token=0,
        oov_tokens=[1])
    output_data = table.lookup(input_array)

    self.assertAllEqual(expected_indices, output_data.indices)
    self.assertAllEqual(expected_values, output_data.values)
    self.assertAllEqual(expected_dense_shape, output_data.dense_shape)

  def test_ragged_string_input(self):
    """String RaggedTensor against the static table."""
    vocab_data = ["earth", "wind", "and", "fire"]
    input_array = ragged_factory_ops.constant(
        [["earth", "wind", "fire"], ["fire", "and", "earth", "michigan"]])
    expected_output = [[2, 3, 5], [5, 4, 2, 1]]

    table = get_static_table(
        tmpdir=self.get_temp_dir(),
        vocab_list=vocab_data,
        mask_token="",
        oov_tokens=[1])
    output_data = table.lookup(input_array)

    self.assertAllEqual(expected_output, output_data)

  def test_ragged_int_input(self):
    """Int64 RaggedTensor against the static table."""
    vocab_data = np.array([10, 11, 12, 13], dtype=np.int64)
    input_array = ragged_factory_ops.constant([[10, 11, 13], [13, 12, 10, 42]],
                                              dtype=np.int64)
    expected_output = [[2, 3, 5], [5, 4, 2, 1]]

    table = get_static_table(
        tmpdir=self.get_temp_dir(),
        vocab_list=vocab_data,
        dtype=dtypes.int64,
        mask_token=0,
        oov_tokens=[1])
    output_data = table.lookup(input_array)

    self.assertAllEqual(expected_output, output_data)
class GetVocabularyFromFileTest(test.TestCase):
  """Tests line parsing in table_utils.get_vocabulary_from_file."""

  def setUp(self):
    super(GetVocabularyFromFileTest, self).setUp()
    dir_path = tempfile.mkdtemp(prefix=test.get_temp_dir())
    self._vocab_path = os.path.join(dir_path, "vocab")

  def test_only_line_separator_is_stripped(self):
    """Leading/trailing spaces must survive; only the line separator goes."""
    expected = ["foo", " foo", "foo ", " foo "]
    with gfile.GFile(self._vocab_path, "w") as writer:
      for word in expected:
        writer.write(word)
        writer.write(os.linesep)

    # Bug fix: the original line read `actual = actual = ...` (a duplicated
    # assignment target); the redundant target is removed.
    actual = table_utils.get_vocabulary_from_file(self._vocab_path)
    self.assertAllEqual(expected, actual)

  def test_linux_file(self):
    """LF-separated content splits into lines (no trailing newline needed)."""
    content = b"line1\nline2\nline3"
    with gfile.GFile(self._vocab_path, "wb") as writer:
      writer.write(content)

    actual = table_utils.get_vocabulary_from_file(self._vocab_path)
    self.assertAllEqual(["line1", "line2", "line3"], actual)

  def test_windows_file(self):
    """CRLF-separated content splits into lines without stray CR characters."""
    content = b"line1\r\nline2\r\nline3"
    with gfile.GFile(self._vocab_path, "wb") as writer:
      writer.write(content)

    actual = table_utils.get_vocabulary_from_file(self._vocab_path)
    self.assertAllEqual(["line1", "line2", "line3"], actual)
# Standard TF test entry point: discovers and runs all test cases above.
if __name__ == "__main__":
  test.main()
| 36.629545
| 81
| 0.664454
|
c255f64433901e09b9530d53ae4a8499c4a5a55f
| 3,690
|
py
|
Python
|
data/p3BR/R2/benchmark/startCirq271.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
data/p3BR/R2/benchmark/startCirq271.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
data/p3BR/R2/benchmark/startCirq271.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=3
# total number=54
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
    """Build the (auto-generated) benchmark circuit over `input_qubit`.

    The gate sequence below is machine-generated (the trailing `# number=NN`
    comments are generation indices); only qubits 0-2 are acted on, and a
    final measurement of all qubits is appended under the key 'result'.
    NOTE(review): `n` is unused here — the circuit acts on whatever qubits
    are passed in; confirm against the caller before removing it.
    """
    c = cirq.Circuit()  # circuit begin

    c.append(cirq.H.on(input_qubit[0])) # number=1
    c.append(cirq.H.on(input_qubit[2])) # number=38
    c.append(cirq.CZ.on(input_qubit[0],input_qubit[2])) # number=39
    c.append(cirq.H.on(input_qubit[2])) # number=40
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[2])) # number=31
    c.append(cirq.H.on(input_qubit[2])) # number=42
    c.append(cirq.CZ.on(input_qubit[0],input_qubit[2])) # number=43
    c.append(cirq.H.on(input_qubit[2])) # number=44
    c.append(cirq.H.on(input_qubit[2])) # number=48
    c.append(cirq.CZ.on(input_qubit[0],input_qubit[2])) # number=49
    c.append(cirq.H.on(input_qubit[2])) # number=50
    c.append(cirq.X.on(input_qubit[2])) # number=46
    c.append(cirq.H.on(input_qubit[2])) # number=51
    c.append(cirq.CZ.on(input_qubit[0],input_qubit[2])) # number=52
    c.append(cirq.H.on(input_qubit[2])) # number=53
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[2])) # number=37
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[2])) # number=33
    c.append(cirq.H.on(input_qubit[2])) # number=25
    c.append(cirq.CZ.on(input_qubit[0],input_qubit[2])) # number=26
    c.append(cirq.H.on(input_qubit[2])) # number=27
    c.append(cirq.H.on(input_qubit[1])) # number=7
    c.append(cirq.CZ.on(input_qubit[2],input_qubit[1])) # number=8
    c.append(cirq.rx(0.17592918860102857).on(input_qubit[2])) # number=34
    c.append(cirq.rx(-0.3989822670059037).on(input_qubit[1])) # number=30
    c.append(cirq.H.on(input_qubit[1])) # number=9
    c.append(cirq.H.on(input_qubit[1])) # number=18
    c.append(cirq.CZ.on(input_qubit[2],input_qubit[1])) # number=19
    c.append(cirq.H.on(input_qubit[1])) # number=20
    c.append(cirq.Y.on(input_qubit[1])) # number=14
    c.append(cirq.H.on(input_qubit[1])) # number=22
    c.append(cirq.CZ.on(input_qubit[2],input_qubit[1])) # number=23
    c.append(cirq.H.on(input_qubit[1])) # number=24
    c.append(cirq.Z.on(input_qubit[2])) # number=3
    c.append(cirq.Z.on(input_qubit[1])) # number=41
    c.append(cirq.X.on(input_qubit[1])) # number=17
    c.append(cirq.Y.on(input_qubit[2])) # number=5
    c.append(cirq.X.on(input_qubit[2])) # number=21
    c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=15
    c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=16
    c.append(cirq.X.on(input_qubit[2])) # number=28
    c.append(cirq.X.on(input_qubit[2])) # number=29
    # circuit end

    # Measure every qubit; results are read back under the 'result' key.
    c.append(cirq.measure(*input_qubit, key='result'))

    return c
def bitstring(bits):
    """Render a sequence of measurement outcomes as a '0'/'1' string."""
    chars = []
    for bit in bits:
        chars.append(str(int(bit)))
    return ''.join(chars)
if __name__ == '__main__':
    qubit_count = 4

    input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]  # result[i]
    circuit = make_circuit(qubit_count, input_qubits)
    circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')

    circuit_sample_count = 2000
    simulator = cirq.Simulator()
    result = simulator.run(circuit, repetitions=circuit_sample_count)

    # Histogram of measured bitstrings, keyed by their '0'/'1' rendering.
    frequencies = result.histogram(key='result', fold_func=bitstring)

    # Bug fix: the file was opened without a context manager and therefore
    # not closed if any print raised; `with` guarantees the close.
    with open("../data/startCirq271.csv", "w+") as writefile:
        print(format(frequencies), file=writefile)
        print("results end", file=writefile)

        print(circuit.__len__(), file=writefile)
        print(circuit, file=writefile)
| 38.4375
| 77
| 0.683198
|
8f2d13ad2be126171bf810ee327b5409508ffc27
| 393
|
py
|
Python
|
ProgrammingProject4/Projectsetup/volumes/task3code.py
|
samuelmmorse/MGSsecurity
|
425621afca366244cdcdc5b991538a618d45fd12
|
[
"Apache-2.0"
] | null | null | null |
ProgrammingProject4/Projectsetup/volumes/task3code.py
|
samuelmmorse/MGSsecurity
|
425621afca366244cdcdc5b991538a618d45fd12
|
[
"Apache-2.0"
] | null | null | null |
ProgrammingProject4/Projectsetup/volumes/task3code.py
|
samuelmmorse/MGSsecurity
|
425621afca366244cdcdc5b991538a618d45fd12
|
[
"Apache-2.0"
] | null | null | null |
from scapy.all import *

# Probe packet: an ICMP echo inside an IP header aimed at 8.8.8.8.
a = IP()
dest = '8.8.8.8'
a.dst = dest
b = ICMP()
a.ttl = 1

# Poor-man's traceroute: raise the TTL one hop at a time and print whichever
# router answers, until the destination itself replies (or TTL reaches 100).
while (a.ttl < 100):
    reply = sr1(a/b, verbose=0, timeout=2)
    if (reply == None):
        # No answer within the 2s timeout — print a placeholder for this hop.
        print(str(a.ttl) + "\t* * * *")
        a.ttl += 1
        continue
    print(str(a.ttl) + "\t" + reply.src)
    if (reply.src == dest):
        # The destination answered directly; the route is complete.
        break
    a.ttl +=1

#packet = Ether()/IP(dst='10.9.0.6')/TCP(dport=23,flags='S')
#send(packet)
| 13.551724
| 60
| 0.544529
|
ab10749ca20cb742fb91a11cac71af28bd52e2c1
| 3,823
|
py
|
Python
|
whatpulse/__init__.py
|
sl4vkek/python-whatpulse
|
bf8abad0a9d0cfdbb96c3cdfc58282f98959180f
|
[
"WTFPL"
] | null | null | null |
whatpulse/__init__.py
|
sl4vkek/python-whatpulse
|
bf8abad0a9d0cfdbb96c3cdfc58282f98959180f
|
[
"WTFPL"
] | null | null | null |
whatpulse/__init__.py
|
sl4vkek/python-whatpulse
|
bf8abad0a9d0cfdbb96c3cdfc58282f98959180f
|
[
"WTFPL"
] | 1
|
2020-03-18T21:06:31.000Z
|
2020-03-18T21:06:31.000Z
|
import requests
import json
from urllib.parse import urljoin
class InvalidIPError(Exception):
    """Raised when the WhatPulse client rejects the connecting IP (HTTP 401)."""
    pass
class RealTime:
    """Snapshot of the WhatPulse client's real-time counters."""

    def __init__(self, clicks_pressed, keys_typed, download_speed, upload_speed):
        # Bug fix: the clicks counter was stored under the misnamed attribute
        # `clicks_typed`. Expose the correctly named `clicks_pressed` and keep
        # the old name as an alias so existing callers keep working.
        self.clicks_pressed = float(clicks_pressed)
        self.clicks_typed = self.clicks_pressed
        self.keys_typed = float(keys_typed)
        # Speeds are stored as received from the API (not coerced to float).
        self.download_speed = download_speed
        self.upload_speed = upload_speed
class Rank:
    """Per-metric leaderboard positions for a WhatPulse account."""

    def __init__(self, clicks_pressed, keys_typed, upload, download, uptime):
        # All rank values are integral; coerce uniformly.
        raw = {
            "clicks_pressed": clicks_pressed,
            "keys_typed": keys_typed,
            "upload": upload,
            "download": download,
            "uptime": uptime,
        }
        for attr, value in raw.items():
            setattr(self, attr, int(value))
class AccountTotals:
    """Lifetime totals for an account, plus its Rank object."""

    def __init__(self, clicks_pressed, keys_typed, uptime, download, upload, rank):
        for attr, value in (("clicks_pressed", clicks_pressed),
                            ("keys_typed", keys_typed),
                            ("uptime", uptime),
                            ("download", download),
                            ("upload", upload)):
            setattr(self, attr, int(value))
        # The rank object is stored as given, without coercion.
        self.rank = rank
class UnPulsed:
    """Statistics accumulated locally but not yet pulsed to the server."""

    def __init__(self, clicks_pressed, keys_typed, download, upload, uptime):
        # Coerce each counter to int, in declaration order.
        self.clicks_pressed, self.keys_typed, self.download, self.upload, self.uptime = (
            int(clicks_pressed), int(keys_typed), int(download),
            int(upload), int(uptime))
class WhatPulse:
    """Minimal client for the local WhatPulse client HTTP API (v1).

    Talks to the desktop client's embedded web server, by default
    http://localhost:3490.
    """

    def __init__(self, ip='localhost', port=3490):
        self.requests_session = requests.Session()
        self.ip = ip
        self.port = port
        self.uri = 'http://' + ip + ':' + str(port)

    @staticmethod
    def __check_status_code(r):
        """Return True on HTTP 200; raise InvalidIPError on 401.

        Other status codes fall through (returning None), preserving the
        original behavior.
        """
        if r.status_code == 200:
            return True
        elif r.status_code == 401:
            raise InvalidIPError('Connecting IP address not allowed in the client settings')

    def pulse(self):
        """Ask the client to pulse its unpulsed stats; True on success."""
        url = urljoin(self.uri, '/v1/pulse')
        r = self.requests_session.post(url)
        return WhatPulse.__check_status_code(r)

    def get_realtime_statistics(self):
        """Return the current realtime counters as a RealTime instance."""
        url = urljoin(self.uri, '/v1/realtime')
        r = self.requests_session.get(url)
        WhatPulse.__check_status_code(r)
        realtime_json = json.loads(r.content.decode())
        return RealTime(
            clicks_pressed=realtime_json['clicks'],
            keys_typed=realtime_json['keys'],
            download_speed=realtime_json['download'],
            upload_speed=realtime_json['upload']
        )

    def get_account_total_statistics(self):
        """Return lifetime account totals (with ranks) as AccountTotals."""
        url = urljoin(self.uri, '/v1/account-totals')
        r = self.requests_session.get(url)
        WhatPulse.__check_status_code(r)
        d = json.loads(r.content.decode())
        dr = d['ranks']
        return AccountTotals(
            clicks_pressed=d['clicks'],
            keys_typed=d['keys'],
            download=d['download'],
            upload=d['upload'],
            uptime=d['uptime'],
            rank=Rank(
                clicks_pressed=dr['rank_clicks'],
                keys_typed=dr['rank_keys'],
                upload=dr['rank_upload'],
                download=dr['rank_download'],
                uptime=dr['rank_uptime']
            )
        )

    def get_unpulsed_stats(self):
        """Return not-yet-pulsed stats as UnPulsed.

        Bug fix: the original passed the absolute URL
        'http://localhost:3490/v1/unpulsed' to urljoin, which made urljoin
        discard self.uri entirely — an instance configured with a custom
        ip/port still queried localhost. Now the relative path is used.
        """
        url = urljoin(self.uri, '/v1/unpulsed')
        r = self.requests_session.get(url)
        WhatPulse.__check_status_code(r)
        unpulsed_json = json.loads(r.content.decode())
        return UnPulsed(
            clicks_pressed=unpulsed_json['clicks'],
            keys_typed=unpulsed_json['keys'],
            upload=unpulsed_json['upload'],
            download=unpulsed_json['download'],
            uptime=unpulsed_json['uptime']
        )
| 32.398305
| 93
| 0.597698
|
f52b0506c75b802e0a080bdb9ae10715baad9e04
| 18,515
|
py
|
Python
|
CustomOp/MetaOptimizer.py
|
ozzzp/MLHF
|
119d8fbedb8661f0474389c6a048decb2505cf45
|
[
"Apache-2.0"
] | 3
|
2018-08-11T02:51:51.000Z
|
2019-01-28T14:03:59.000Z
|
CustomOp/MetaOptimizer.py
|
ozzzp/MLHF
|
119d8fbedb8661f0474389c6a048decb2505cf45
|
[
"Apache-2.0"
] | null | null | null |
CustomOp/MetaOptimizer.py
|
ozzzp/MLHF
|
119d8fbedb8661f0474389c6a048decb2505cf45
|
[
"Apache-2.0"
] | null | null | null |
import itertools
# import numpy as np
import os
import tensorflow as tf
import tensorflow.contrib as tfcb
from tensorflow.python.framework import function
from tensorflow.python.training.optimizer import _var_key
from tensorflow.python.training.slot_creator import create_slot_with_initializer
from .gradients_impl import gradients
from .hession_loss import loss_types
from .op_r_forward import op_r_forward_funcs
from .rnn import RNN_optimizers, slot_name, _EPSILON
class MetaHessionFreeOptimizer(tf.train.GradientDescentOptimizer):
    def __init__(self, learning_rate, optimizers=RNN_optimizers, is_training=False, use_locking=False,
                 name="MetaHessionFree", iter=5, damping=2e-5, damping_type='regular', decay=2 / 3, print_log=False,
                 **kwargs):
        """Create a meta-learned Hessian-free optimizer.

        Args:
          learning_rate: step size forwarded to the base GradientDescentOptimizer.
          optimizers: factory for the per-variable RNN modules; invoked with
            **kwargs. Defaults to RNN_optimizers.
          is_training: when True, slot updates are returned as (var, value)
            pairs instead of being assigned (see _apply_state), so the RNN
            modules themselves can be trained through them.
          use_locking: forwarded to the base optimizer.
          name: optimizer name.
          iter: maximum number of inner (CG-style) iterations in minimize().
          damping: initial damping coefficient.
          damping_type: 'regular' (fixed damping) or 'LM_heuristics'
            (adaptive, Levenberg-Marquardt-style).
          decay: damping adjustment factor used by the LM heuristic.
          print_log: if True, emit tf.Print debug output.
          **kwargs: forwarded to the `optimizers` factory.
        """
        self._optimizers = optimizers(**kwargs)
        self._is_training = is_training
        self._n = iter
        self._print_log = print_log
        self._damping = damping
        self._decay = decay
        assert damping_type in ['regular', 'LM_heuristics']
        self._damping_type = damping_type
        super(MetaHessionFreeOptimizer, self).__init__(learning_rate=learning_rate,
                                                       use_locking=use_locking,
                                                       name=name)
    @staticmethod
    def _r_forward(r_v_list, out, input_list):
        """Forward pass of the R-operator (directional derivative).

        Propagates tangents `r` attached to the variables in `r_v_list`
        forward through the graph to `out`, treating every tensor in
        `input_list` as constant (zero tangent). Returns the tangent of
        `out`, i.e. the Jacobian-vector product applied to the r's.
        """
        with tf.name_scope('difference_forward'):
            # Seed tangents: each variable value carries its r; inputs carry 0.
            r_dict = {v.value(): r for r, v in r_v_list}
            r_dict.update({input: tf.zeros_like(input) for input in input_list})
            used_ops = tfcb.graph_editor.get_backward_walk_ops(seed_ops=out.op, stop_at_ts=list(r_dict.keys()))
            used_ops = reversed(used_ops)
            # Worklist sweep: an op is processed only once the tangents of all
            # its inputs are known; not-yet-ready ops are retried next round.
            while True:
                last_ops = []
                for op in used_ops:
                    has_floating = set(i.dtype.is_floating for i in op.outputs)
                    if True not in has_floating:
                        # Ops with no floating outputs carry no tangent.
                        outs = [None for i in op.outputs]
                    else:
                        try:
                            r_input = [r_dict[i] for i in op.inputs]
                        except:
                            # Some input tangent is missing; defer this op.
                            last_ops.append(op)
                            continue
                        # Every floating op on the path must have a registered
                        # R-forward rule.
                        assert op.type in op_r_forward_funcs, op.type
                        forward_func = op_r_forward_funcs[op.type]
                        outs = forward_func(op, r_input)
                    r_dict.update({v: r for r, v in zip(outs, op.outputs)})
                if last_ops:
                    used_ops = last_ops
                else:
                    break
            assert out in r_dict
            return r_dict[out]
    def _generate_Hv_fun(self, var_list, out, input_list, Hl_func, ds=None, damping=0):
        """Build a function computing Hessian-vector products H*v.

        The product is computed matrix-free: R-forward through the network
        to `out`, `Hl_func` at the loss, then a backward pass. When `ds`
        (per-variable gradients) is given, per-variable damping terms from
        the RNN 'd' modules plus the scalar `damping` are added to each
        product.
        """
        def shape_func(op):
            return [var.get_shape() for var in var_list]

        if ds is not None:
            dampings = [self._generate_d(d, var=v) + damping for d, v in zip(ds, var_list)]
        else:
            dampings = None

        @function.Defun(*[v.dtype for v in var_list], shape_func=shape_func)
        def Hv(*vs):
            assert len(var_list) == len(vs)
            # Defun loses static shapes; restore them from the variables.
            for var, v in zip(var_list, vs):
                v.set_shape(var.get_shape())
            with tf.name_scope('Hession_product', values=vs):
                # difference forward
                r_out = self._r_forward(r_v_list=list(zip(vs, var_list)), out=out, input_list=input_list)
                print('difference forward done')
                rd_out = Hl_func(r_out, out)
                # TODO define RNN #To stable Hession
                # Oops, still no idea.
                # difference backword, same as common back propagation but with special init grad.
                rds = self._rd_backward(out=out, rd_out=rd_out, v_list=var_list)
                print('difference backward done')
                '''
                test_case = set(tf.gradients(rds, var_list))
                assert test_case == {None}
                test_case = set(tf.gradients(rds, vs))
                assert None not in test_case
                '''
                return tuple(rds)

        def grad_Hv(op, *vs):
            # Custom gradient for Hv: re-invokes the same Defun on the
            # incoming cotangents, temporarily swapping in the op's captured
            # extra inputs. NOTE(review): this presumes H is symmetric so
            # that Hv serves as its own vector-Jacobian product — confirm.
            Hv_extra_inputs_backup = Hv._extra_inputs
            Hv._extra_inputs = list(op.inputs)[len(vs):]
            outs = list(Hv(*vs))
            nones = [None] * (len(op.inputs) - len(outs))
            Hv._extra_inputs = Hv_extra_inputs_backup
            return tuple(outs + nones)

        Hv._python_grad_func = grad_Hv

        def _Hv(*vs):
            rds = Hv(*vs)
            if dampings is not None:
                # Tikhonov-style regularization: add damping_i * v_i.
                rds = [rd + damping * v for rd, damping, v in zip(rds, dampings, vs)]
            return tuple(rds)

        return _Hv
    def _get_or_make_slot_with_initializer(self, var, initializer, shape, dtype,
                                           slot_name, op_name):
        """Find or create a slot for a variable, using an Initializer.

        Args:
          var: A `Variable` object.
          initializer: An `Initializer`. The initial value of the slot.
          shape: Shape of the initial value of the slot.
          dtype: Type of the value of the slot.
          slot_name: Name for the slot.
          op_name: Name to use when scoping the Variable that
            needs to be created for the slot.

        Returns:
          A `Variable` object.
        """
        named_slots = self._slot_dict(slot_name)
        if _var_key(var) not in named_slots:
            # Slots live under a shared 'slots' scope; AUTO_REUSE lets repeated
            # graph construction pick up existing slot variables.
            with tf.variable_scope('slots', reuse=tf.AUTO_REUSE):
                named_slots[_var_key(var)] = create_slot_with_initializer(
                    var, initializer, shape, dtype, op_name)
        return named_slots[_var_key(var)]
@staticmethod
def _inner_product(A_list, B_list):
sum_list = [tf.reduce_sum(A * B) for A, B in zip(A_list, B_list)]
return tf.add_n(sum_list)
def _generate_x(self, d, var=None):
with tf.name_scope('rnn_x'):
name = os.path.join(*[i.split('_')[0] for i in var.op.name.rsplit('/', 3)[-2:]])
assert name in self._optimizers, 'sorry, rnn optimizer of {} is not defined'.format(name)
x_fn = self._optimizers[name]['x']
out = x_fn(d, var=var, optimizer=self)
return out
def _generate_d(self, d, var=None):
with tf.name_scope('rnn_d'):
name = os.path.join(*[i.split('_')[0] for i in var.op.name.rsplit('/', 3)[-2:]])
assert name in self._optimizers, 'sorry, rnn optimizer of {} is not defined'.format(name)
d_fn = self._optimizers[name]['d']
out = d_fn(d, var=var, optimizer=self)
return out
def _generate_state_transform(self, r_1, x_1, var=None):
with tf.name_scope('rnn_sf'):
name = os.path.join(*[i.split('_')[0] for i in var.op.name.rsplit('/', 3)[-2:]])
assert name in self._optimizers, 'sorry, rnn state transform of {} is not defined'.format(name)
sf_fn = self._optimizers[name]['sf']
sf_fn(r_1=r_1, x_1=x_1, var=var, optimizer=self)
def _generate_y(self, d, r_0, x_0, var=None):
with tf.name_scope('rnn_y'):
name = os.path.join(*[i.split('_')[0] for i in var.op.name.rsplit('/', 3)[-2:]])
assert name in self._optimizers, 'sorry, rnn optimizer of {} is not defined'.format(name)
y_fn = self._optimizers[name]['y']
out = y_fn(d, r_0, x_0, var=var, optimizer=self)
return out
def _rd_backward(self, out, rd_out, v_list):
rd_list = gradients(out, v_list, grad_ys=rd_out)
return rd_list
    def compute_gradients(self, *args, **kwargs):
        # The public entry point is intentionally disabled; use minimize(),
        # which drives the internal _compute_gradients pipeline.
        raise NotImplementedError("Sorry, call compute_gradients directly is not allowed")
    def _compute_gradients(self, *args, **kwargs):
        # Internal gradient computation; gating is disabled so the per-variable
        # gradients can be consumed independently downstream.
        return super(MetaHessionFreeOptimizer, self).compute_gradients(*args,
                                                                       gate_gradients=MetaHessionFreeOptimizer.GATE_NONE,
                                                                       **kwargs)
def apply_gradients(self, *args, **kwargs):
raise NotImplementedError("Sorry, call compute_gradients directly is not allowed")
    def _apply_gradients(self, *args, **kwargs):
        # Internal entry point used by minimize(); delegates to the base
        # optimizer's apply_gradients.
        return super(MetaHessionFreeOptimizer, self).apply_gradients(*args, **kwargs)
def set_slot_shadow(self, var, val, slot_name, replace=False):
named_slots = self._slot_dict(slot_name + '_shadow')
key = var if isinstance(var, str) else _var_key(var)
if replace:
assert key in named_slots
else:
assert key not in named_slots
named_slots[key] = val
    def _apply_state(self, var):
        """Collect slot updates for `var` from its '_shadow' slots.

        Returns (slot_var, new_value) pairs when self._is_training (so the
        caller can keep the update differentiable), otherwise executed
        tf.assign ops.
        """
        ops = []
        # RNN hidden states are stored per (rnn_type, layer l, unit i); probe
        # increasing l and i until get_slot returns None. The `if i == 0`
        # check ends the layer scan when a layer has no first unit.
        for rnn_type in ['x', 'y', 'd']:
            for l in itertools.count():
                for i in itertools.count():
                    slot_var = self.get_slot(var, slot_name(l, i, rnn_type))
                    if slot_var is None:
                        break
                    slot_val = self.get_slot(var, slot_name(l, i, rnn_type) + '_shadow')
                    if self._is_training:
                        ops.append((slot_var, slot_val))
                    else:
                        ops.append(tf.assign(slot_var, slot_val))
                if i == 0:
                    break
        # CG residual/solution state (r_1, x_1) is always present; its shadow
        # must be a Tensor staged via set_slot_shadow.
        for val_name in ['r_1', 'x_1']:
            slot_var = self.get_slot(var, val_name)
            slot_val = self.get_slot(var, val_name + '_shadow')
            assert isinstance(slot_var, tf.Variable)
            assert isinstance(slot_val, tf.Tensor)
            if self._is_training:
                ops.append((slot_var, slot_val))
            else:
                ops.append(tf.assign(slot_var, slot_val))
        return ops
def _apply_dense(self, grad, var):
ops = self._apply_state(var)
with tf.control_dependencies(ops):
return super(MetaHessionFreeOptimizer, self)._apply_dense(grad, var)
    def _apply_sparse(self, grad, var):
        # Sparse and resource-variable update paths are not supported by this
        # optimizer; only dense variables are handled (see _apply_dense).
        raise NotImplementedError

    def _resource_apply_dense(self, grad, handle):
        raise NotImplementedError

    def _resource_apply_sparse(self, grad, handle, indices):
        raise NotImplementedError

    def _resource_apply_sparse_duplicate_indices(self, grad, handle, indices):
        raise NotImplementedError
def minimize(self, loss_type, out, label, input_list, global_step=None, var_list=None, network_fn=None):
assert loss_type in loss_types
loss_fn, Hl_fun = loss_types[loss_type]
# 1st forward
loss = loss_fn(out, label)
print('1st forward done')
# 2nd backward
d_and_v = self._compute_gradients(loss, var_list=var_list)
print('2nd backward done')
if self._damping_type == 'LM_heuristics':
assert callable(network_fn)
self._last_loss = tf.get_variable('last_loss', initializer=tf.zeros_initializer, shape=[], dtype=tf.float32)
self._q_difference = tf.get_variable('q_difference', initializer=tf.zeros_initializer, shape=[],
dtype=tf.float32)
self._last_inputs = [
tf.get_variable('last_input_{}'.format(i), initializer=tf.zeros_initializer, shape=input.shape,
dtype=input.dtype, trainable=False)
for i, input in enumerate(input_list)]
self._last_label = tf.get_variable('last_label', initializer=tf.zeros_initializer, shape=label.shape,
dtype=label.dtype, trainable=False)
self._damping = tf.get_variable('damping', initializer=self._damping, dtype=tf.float32,
trainable=False)
loss_on_last_batch = loss_fn(network_fn(*self._last_inputs), self._last_label)
rho = (
loss_on_last_batch - self._last_loss) / self._q_difference # tf.Print(self._q_difference, [self._q_difference, loss_on_last_batch, self._last_loss])
rho = tf.where(tf.equal(self._q_difference, 0), 0.5, rho)
# rho = tf.Print(rho, [rho], message='rho:')
decay = tf.train.piecewise_constant(rho, [0.25, 0.75], [1 / self._decay, 1., self._decay])
# decay = tf.Print(decay, [decay], message='decay:')
damping = self._damping * decay
damping = tf.clip_by_value(damping, 1e-3, 1)
# damping = tf.Print(damping, [damping], message='damping:')
else:
damping = self._damping
ds = [tf.stop_gradient(d) for d, _ in d_and_v if d is not None]
if not ds:
raise ValueError(
"No gradients provided for any variable, check your graph for ops"
" that do not support gradients, between variables %s and loss %s." %
([str(v) for _, v in d_and_v], loss))
var_list = [v for d, v in d_and_v if d is not None]
Hv_fun = self._generate_Hv_fun(ds=ds, var_list=var_list, out=out, input_list=input_list, Hl_func=Hl_fun,
damping=damping)
# generate x_0 from (d, r_1, x_1)
x_is = [self._generate_x(d, var=v) for d, v in zip(ds, var_list)]
print('rnn_x generated')
H_xis = list(Hv_fun(*x_is))
# r_0 = b - H_x0 = d - H_x0
ds_length = tf.global_norm(ds)
ds_length_sq = ds_length ** 2
# gamma_0 = self._inner_product(ds, H_x0s) / ds_length_sq
r_is = [d - H_xi for d, H_xi in zip(ds, H_xis)]
# y_0 = r_0 * p
# p = f(r_0, x_0, d)
# so, y_0 =r_0 * f(r_0, x_0, d)
Ps = [self._generate_y(d, r_i, x_i, var=v) for d, r_i, x_i, v in
zip(ds, r_is, x_is, var_list)]
print('rnn_y generated')
y_is = [P * r_i for P, r_i in zip(Ps, r_is)]
p_is = y_is
beta_part = self._inner_product(r_is, y_is)
def _cal(p_is, r_is, x_is, beta_part):
# y_0 as p_0
# cal H_p0 = H_y0
H_pis = list(Hv_fun(*p_is))
# \alpha = <r_0, y_0>/<p0 , H_p0> = <r_0, y_0>/<y_0 , H_y0>
p2 = self._inner_product(p_is, H_pis)
alpha = beta_part / tf.maximum(p2, _EPSILON)
# x_1 = x_0 + \alpha p_0 = x_0 + \alpha y_0
x_is = [x_i + alpha * p_i for x_i, p_i in zip(x_is, p_is)]
# r_1 = r_0 - \alpha H_p0 = r_0 - \alpha H_y0
# gamma_1 = self._inner_product(ds, H_y0s) / ds_length_sq
r_is = [r_i - alpha * H_pi for r_i, H_pi in zip(r_is, H_pis)]
y_is = [P * r_i for P, r_i in zip(Ps, r_is)]
new_beta_part = self._inner_product(r_is, y_is)
beta = new_beta_part / tf.maximum(beta_part, _EPSILON)
beta_part = new_beta_part
p_is = [y_i + beta * p_i for y_i, p_i in zip(y_is, p_is)]
return p_is, r_is, x_is, beta_part
def _cond(p_is, r_is, x_is, beta_part):
return tf.global_norm(r_is) >= _EPSILON
loop_vars = (p_is, r_is, x_is, beta_part)
p_is, r_is, x_is, beta_part = \
tf.while_loop(_cond, _cal, loop_vars, swap_memory=True,
back_prop=self._is_training,
parallel_iterations=1,
maximum_iterations=self._n)
# apply state transform.
for r_i, x_i, var in zip(r_is, x_is, var_list):
self._generate_state_transform(r_i, x_i, var=var)
print('rnn_sf generated')
inner_p_ds_x_is = self._inner_product(ds, x_is)
H_xis = [d - r_i for d, r_i in zip(ds, r_is)]
x_is_H_xis = self._inner_product(x_is, H_xis)
if self._damping_type == 'LM_heuristics':
q_difference = - self._learning_rate * inner_p_ds_x_is + self._learning_rate ** 2 / 2 * x_is_H_xis
if self._is_training:
hession_loss = - inner_p_ds_x_is / tf.sqrt(x_is_H_xis)
# minize r_1
# assert there should be no grad which would backprop from x_1s to nn variable.
r_loss = tf.global_norm(r_is)
var_length = tf.stop_gradient(tf.global_norm(var_list))
if self._print_log:
H_ds = list(Hv_fun(*ds))
standard_loss = ds_length_sq / tf.sqrt(self._inner_product(ds, H_ds))
hession_loss = tf.Print(hession_loss,
[tf.global_norm(x_is) / ds_length, r_loss, -hession_loss,
standard_loss, inner_p_ds_x_is,
ds_length_sq,
var_length, loss],
message='x1l/gl, rl, hs, ss, hip, sip, vl, loss:')
x_is, _ = tf.clip_by_global_norm(x_is, var_length * (0.25 / self._learning_rate))
next_state = []
for x_i, v in zip(x_is, var_list):
noise = tf.random_uniform(x_i.get_shape(), 1 - 2e-2, 1 + 2e-2)
next_state.append((v, tf.stop_gradient(v) - noise * self._learning_rate * x_i))
next_state.extend(self._apply_state(v))
assert len(next_state) == len(tf.global_variables(scope='slots')) + len(tf.trainable_variables(scope='nn'))
if self._damping_type == 'LM_heuristics':
for val, var in zip(input_list, self._last_inputs):
next_state.append((var, val))
next_state.append((self._damping, damping))
next_state.append((self._last_loss, loss))
next_state.append((self._last_label, label))
next_state.append((self._q_difference, q_difference))
return next_state, loss, hession_loss, r_loss
else:
if self._damping_type == 'LM_heuristics':
depends = [tf.assign(var, val) for val, var in zip(input_list, self._last_inputs)]
depends.append(tf.assign(self._damping, damping))
depends.append(tf.assign(self._last_loss, loss))
depends.append(tf.assign(self._last_label, label))
depends.append(tf.assign(self._q_difference, q_difference))
else:
depends = []
with tf.control_dependencies(depends):
return self._apply_gradients(list(zip(x_is, var_list)), global_step=global_step)
| 44.614458
| 179
| 0.569052
|
f9aabc0db2d79dcd8f61805e5399ec1c253439e2
| 556
|
py
|
Python
|
cpsplines/utils/timer.py
|
ManuelNavarroGarcia/cpsplines
|
544e8ccf7e438a192dea6c4a4e685d9346f57f9a
|
[
"MIT"
] | null | null | null |
cpsplines/utils/timer.py
|
ManuelNavarroGarcia/cpsplines
|
544e8ccf7e438a192dea6c4a4e685d9346f57f9a
|
[
"MIT"
] | 1
|
2022-02-12T17:33:08.000Z
|
2022-02-12T17:33:08.000Z
|
cpsplines/utils/timer.py
|
ManuelNavarroGarcia/cpsplines
|
544e8ccf7e438a192dea6c4a4e685d9346f57f9a
|
[
"MIT"
] | null | null | null |
from collections.abc import Iterator
from contextlib import contextmanager
from timeit import default_timer
from typing import Optional
@contextmanager
def timer(tag: Optional[str] = None) -> Iterator[None]:
    """
    Context manager that measures and prints how long a task takes.

    Parameters
    ----------
    tag : str, optional
        The name of the task, printed as a ``[tag]`` prefix in the report.
        By default, None (no prefix).
    """
    start = default_timer()
    try:
        # Hand control to the `with` body; timing stops when it exits.
        yield
    finally:
        # Runs even if the body raises, so the elapsed time is always reported.
        end = default_timer()
        header = "Elapsed time (s)" if tag is None else f"[{tag}] Elapsed time (s)"
        print(f"{header}: {end - start:.6f}")
| 22.24
| 83
| 0.609712
|
9c07aa3cfaf87ecf569cebf670dc523efee96fdd
| 3,527
|
py
|
Python
|
paddlex/cv/datasets/easydata_cls.py
|
mingren8888/PaddleX
|
d44249da26898b4b77491f8a5e8a86d680e52fa4
|
[
"Apache-2.0"
] | null | null | null |
paddlex/cv/datasets/easydata_cls.py
|
mingren8888/PaddleX
|
d44249da26898b4b77491f8a5e8a86d680e52fa4
|
[
"Apache-2.0"
] | null | null | null |
paddlex/cv/datasets/easydata_cls.py
|
mingren8888/PaddleX
|
d44249da26898b4b77491f8a5e8a86d680e52fa4
|
[
"Apache-2.0"
] | null | null | null |
# copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os.path as osp
import random
import copy
import json
import paddlex.utils.logging as logging
from paddlex.utils import path_normalization
from .imagenet import ImageNet
from .dataset import is_pic
from .dataset import get_encoding
class EasyDataCls(ImageNet):
    """Reads a classification dataset in EasyDataCls format and prepares samples.

    Args:
        data_dir (str): Directory containing the dataset.
        file_list (str): Path to a file describing the dataset's images and their
            annotation files (each line holds paths relative to ``data_dir``).
        label_list (str): Path to a file listing the dataset's category names,
            one per line.
        transforms (paddlex.cls.transforms): Preprocessing/augmentation operators
            applied to each sample. Defaults to None.
        num_workers (int|str): Number of threads or processes used to preprocess
            samples. Defaults to 'auto': if half the CPU core count exceeds 8,
            ``num_workers`` is 8, otherwise half the CPU core count.
        buffer_size (int): Queue length (in samples) used during preprocessing.
            Defaults to 8.
        parallel_method (str): Parallelization strategy for preprocessing,
            either 'thread' or 'process'. Defaults to 'process' (forced to
            'thread' on Windows and macOS, where this argument has no effect).
        shuffle (bool): Whether to shuffle the samples. Defaults to False.
    """
    def __init__(self,
                 data_dir,
                 file_list,
                 label_list,
                 transforms=None,
                 num_workers='auto',
                 buffer_size=8,
                 parallel_method='process',
                 shuffle=False):
        # NOTE(review): super() is deliberately given ImageNet, so ImageNet's own
        # __init__ is bypassed and the grandparent Dataset initializer runs
        # directly — confirm this matches ImageNet's init side effects.
        super(ImageNet, self).__init__(
            transforms=transforms,
            num_workers=num_workers,
            buffer_size=buffer_size,
            parallel_method=parallel_method,
            shuffle=shuffle)
        self.file_list = list()
        self.labels = list()
        self._epoch = 0
        # One category name per line; list index becomes the class id.
        with open(label_list, encoding=get_encoding(label_list)) as f:
            for line in f:
                item = line.strip()
                self.labels.append(item)
        logging.info("Starting to read file list from dataset...")
        with open(file_list, encoding=get_encoding(file_list)) as f:
            for line in f:
                # Each line: "<image path> <json annotation path>" relative to data_dir.
                img_file, json_file = [osp.join(data_dir, x) \
                        for x in line.strip().split()[:2]]
                img_file = path_normalization(img_file)
                json_file = path_normalization(json_file)
                # Skip non-image entries and entries whose annotation is missing.
                if not is_pic(img_file):
                    continue
                if not osp.isfile(json_file):
                    continue
                if not osp.exists(img_file):
                    raise IOError('The image file {} is not exist!'.format(
                        img_file))
                # The class label is the name of the first entry in the
                # EasyData JSON annotation's "labels" list.
                with open(json_file, mode='r', \
                          encoding=get_encoding(json_file)) as j:
                    json_info = json.load(j)
                    label = json_info['labels'][0]['name']
                self.file_list.append([img_file, self.labels.index(label)])
        self.num_samples = len(self.file_list)
        logging.info("{} samples in file {}".format(
            len(self.file_list), file_list))
| 39.629213
| 77
| 0.617806
|
f4663d022e5c891a06c7f5ad97e3101ef049e76d
| 1,010
|
py
|
Python
|
common/ops/merge_ops.py
|
vahidk/TensorflowFramework
|
a9377d0dd8f5ac93e810876fbe8987990e3c728f
|
[
"BSD-3-Clause"
] | 129
|
2017-08-19T07:18:55.000Z
|
2020-07-16T03:05:31.000Z
|
common/ops/merge_ops.py
|
vahidk/TensorflowFramework
|
a9377d0dd8f5ac93e810876fbe8987990e3c728f
|
[
"BSD-3-Clause"
] | 5
|
2017-09-13T08:55:31.000Z
|
2019-07-12T06:52:07.000Z
|
common/ops/merge_ops.py
|
vahidk/TensorflowFramework
|
a9377d0dd8f5ac93e810876fbe8987990e3c728f
|
[
"BSD-3-Clause"
] | 46
|
2017-08-21T21:18:50.000Z
|
2022-03-12T05:57:02.000Z
|
"""Merge ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from common.ops import regularizer_ops
def merge(tensors, units,
          activation=tf.nn.relu,
          name=None,
          weight_decay=0.0,
          weight_regularizer="l2",
          **kwargs):
  """Project each input tensor to `units` dims and sum the projections.

  Broadcasting between inputs of compatible shapes is supported by the
  element-wise addition of the projections.
  """
  with tf.variable_scope(name, default_name="merge"):
    def _project(index, tensor):
      # Bias-free linear projection so the merge is a pure weighted sum.
      layer = tf.keras.layers.Dense(
          units,
          use_bias=False,
          kernel_initializer=tf.glorot_uniform_initializer(),
          kernel_regularizer=regularizer_ops.weight_regularizer(
              weight_decay, weight_regularizer),
          name="proj_%d" % index,
          **kwargs)
      return layer.apply(tensor)

    projections = [_project(idx, t) for idx, t in enumerate(tensors)]
    # Accumulate starting from the last projection (same order as the
    # original pop-then-add loop).
    merged = projections[-1]
    for projection in projections[:-1]:
      merged = merged + projection
    if activation:
      merged = activation(merged)
    return merged
| 25.897436
| 64
| 0.647525
|
3dc86378efd15da478c27d56780e6e3b3e2b64c4
| 7,647
|
py
|
Python
|
sandbox/rocky/tf/policies/categorical_gru_policy.py
|
RussellM2020/maml_gps
|
631560dfd4e23dc2da9bfbbd2e3c5252aa9775c5
|
[
"MIT"
] | 541
|
2017-07-19T00:49:13.000Z
|
2022-03-28T21:14:23.000Z
|
sandbox/rocky/tf/policies/categorical_gru_policy.py
|
RussellM2020/maml_gps
|
631560dfd4e23dc2da9bfbbd2e3c5252aa9775c5
|
[
"MIT"
] | 13
|
2018-02-28T02:29:58.000Z
|
2021-03-21T13:49:49.000Z
|
sandbox/rocky/tf/policies/categorical_gru_policy.py
|
RussellM2020/maml_gps
|
631560dfd4e23dc2da9bfbbd2e3c5252aa9775c5
|
[
"MIT"
] | 168
|
2017-07-19T12:21:01.000Z
|
2022-02-22T00:46:40.000Z
|
import numpy as np
import sandbox.rocky.tf.core.layers as L
import tensorflow as tf
from sandbox.rocky.tf.core.layers_powered import LayersPowered
from sandbox.rocky.tf.core.network import GRUNetwork, MLP
from sandbox.rocky.tf.distributions.recurrent_categorical import RecurrentCategorical
from sandbox.rocky.tf.misc import tensor_utils
from sandbox.rocky.tf.spaces.discrete import Discrete
from sandbox.rocky.tf.policies.base import StochasticPolicy
from rllab.core.serializable import Serializable
from rllab.misc import special
from rllab.misc.overrides import overrides
class CategoricalGRUPolicy(StochasticPolicy, LayersPowered, Serializable):
    """Recurrent stochastic policy for discrete action spaces.

    A GRU network maps a (possibly feature-extracted) observation sequence to
    per-step categorical action probabilities. Optionally the previous action
    (one-hot flattened) is concatenated to each observation as extra input.
    """
    def __init__(
            self,
            name,
            env_spec,
            hidden_dim=32,
            feature_network=None,
            state_include_action=True,
            hidden_nonlinearity=tf.tanh,
            gru_layer_cls=L.GRULayer,
    ):
        """
        :param name: variable scope under which all network variables live
        :param env_spec: A spec for the env (must have a Discrete action space).
        :param hidden_dim: dimension of the GRU hidden state
        :param feature_network: optional network applied to the flattened
            per-step input before the GRU; None feeds the raw input directly
        :param state_include_action: if True, concatenate the previous action
            to each observation
        :param hidden_nonlinearity: nonlinearity used for each hidden layer
        :param gru_layer_cls: layer class used to build the recurrent core
        :return:
        """
        with tf.variable_scope(name):
            assert isinstance(env_spec.action_space, Discrete)
            Serializable.quick_init(self, locals())
            super(CategoricalGRUPolicy, self).__init__(env_spec)
            obs_dim = env_spec.observation_space.flat_dim
            action_dim = env_spec.action_space.flat_dim
            # Per-step input is obs (+ previous action when requested).
            if state_include_action:
                input_dim = obs_dim + action_dim
            else:
                input_dim = obs_dim
            # Shape: (batch, time, input_dim); both leading dims are dynamic.
            l_input = L.InputLayer(
                shape=(None, None, input_dim),
                name="input"
            )
            if feature_network is None:
                feature_dim = input_dim
                l_flat_feature = None
                l_feature = l_input
            else:
                # The feature network operates on flattened (batch*time, input_dim)
                # rows; reshape its output back to (batch, time, feature_dim).
                feature_dim = feature_network.output_layer.output_shape[-1]
                l_flat_feature = feature_network.output_layer
                l_feature = L.OpLayer(
                    l_flat_feature,
                    extras=[l_input],
                    name="reshape_feature",
                    # NOTE(review): tf.pack is the pre-TF-1.0 name of tf.stack,
                    # while tf.concat(axis=...) below is TF-1.x style — confirm
                    # the pinned TensorFlow version supports both.
                    op=lambda flat_feature, input: tf.reshape(
                        flat_feature,
                        tf.pack([tf.shape(input)[0], tf.shape(input)[1], feature_dim])
                    ),
                    shape_op=lambda _, input_shape: (input_shape[0], input_shape[1], feature_dim)
                )
            # GRU core producing softmax action probabilities per step.
            prob_network = GRUNetwork(
                input_shape=(feature_dim,),
                input_layer=l_feature,
                output_dim=env_spec.action_space.n,
                hidden_dim=hidden_dim,
                hidden_nonlinearity=hidden_nonlinearity,
                output_nonlinearity=tf.nn.softmax,
                gru_layer_cls=gru_layer_cls,
                name="prob_network"
            )
            self.prob_network = prob_network
            self.feature_network = feature_network
            self.l_input = l_input
            self.state_include_action = state_include_action
            # Single-step path used at action-sampling time (no time axis).
            flat_input_var = tf.placeholder(dtype=tf.float32, shape=(None, input_dim), name="flat_input")
            if feature_network is None:
                feature_var = flat_input_var
            else:
                feature_var = L.get_output(l_flat_feature, {feature_network.input_layer: flat_input_var})
            # f_step_prob(flat_input, prev_hidden) -> (action probs, next hidden).
            self.f_step_prob = tensor_utils.compile_function(
                [
                    flat_input_var,
                    prob_network.step_prev_hidden_layer.input_var
                ],
                L.get_output([
                    prob_network.step_output_layer,
                    prob_network.step_hidden_layer
                ], {prob_network.step_input_layer: feature_var})
            )
            self.input_dim = input_dim
            self.action_dim = action_dim
            self.hidden_dim = hidden_dim
            # Per-environment rollout state, allocated lazily in reset().
            self.prev_actions = None
            self.prev_hiddens = None
            self.dist = RecurrentCategorical(env_spec.action_space.n)
            out_layers = [prob_network.output_layer]
            if feature_network is not None:
                out_layers.append(feature_network.output_layer)
            LayersPowered.__init__(self, out_layers)
    @overrides
    def dist_info_sym(self, obs_var, state_info_vars):
        """Symbolic per-step action probabilities for a (batch, time, ...) batch."""
        n_batches = tf.shape(obs_var)[0]
        n_steps = tf.shape(obs_var)[1]
        obs_var = tf.reshape(obs_var, tf.pack([n_batches, n_steps, -1]))
        obs_var = tf.cast(obs_var, tf.float32)
        if self.state_include_action:
            # Append the previous (one-hot) action to each observation step.
            prev_action_var = tf.cast(state_info_vars["prev_action"], tf.float32)
            all_input_var = tf.concat(axis=2, values=[obs_var, prev_action_var])
        else:
            all_input_var = obs_var
        if self.feature_network is None:
            return dict(
                prob=L.get_output(
                    self.prob_network.output_layer,
                    {self.l_input: all_input_var}
                )
            )
        else:
            # Feature network consumes flattened (batch*time, input_dim) rows.
            flat_input_var = tf.reshape(all_input_var, (-1, self.input_dim))
            return dict(
                prob=L.get_output(
                    self.prob_network.output_layer,
                    {self.l_input: all_input_var, self.feature_network.input_layer: flat_input_var}
                )
            )
    @property
    def vectorized(self):
        # Supports sampling actions for many environments at once.
        return True
    def reset(self, dones=None):
        """Reset recurrent state for the environments flagged done."""
        if dones is None:
            dones = [True]
        dones = np.asarray(dones)
        # (Re)allocate per-env buffers when the batch size changes.
        if self.prev_actions is None or len(dones) != len(self.prev_actions):
            self.prev_actions = np.zeros((len(dones), self.action_space.flat_dim))
            self.prev_hiddens = np.zeros((len(dones), self.hidden_dim))
        self.prev_actions[dones] = 0.
        self.prev_hiddens[dones] = self.prob_network.hid_init_param.eval()  # get_value()
    # The return value is a pair. The first item is a matrix (N, A), where each
    # entry corresponds to the action value taken. The second item is a vector
    # of length N, where each entry is the density value for that action, under
    # the current policy
    @overrides
    def get_action(self, observation):
        actions, agent_infos = self.get_actions([observation])
        return actions[0], {k: v[0] for k, v in agent_infos.items()}
    @overrides
    def get_actions(self, observations):
        """Sample one action per observation, advancing the recurrent state."""
        flat_obs = self.observation_space.flatten_n(observations)
        if self.state_include_action:
            assert self.prev_actions is not None
            all_input = np.concatenate([
                flat_obs,
                self.prev_actions
            ], axis=-1)
        else:
            all_input = flat_obs
        probs, hidden_vec = self.f_step_prob(all_input, self.prev_hiddens)
        actions = special.weighted_sample_n(probs, np.arange(self.action_space.n))
        prev_actions = self.prev_actions
        # Store one-hot actions / new hidden state for the next step.
        self.prev_actions = self.action_space.flatten_n(actions)
        self.prev_hiddens = hidden_vec
        agent_info = dict(prob=probs)
        if self.state_include_action:
            agent_info["prev_action"] = np.copy(prev_actions)
        return actions, agent_info
    @property
    @overrides
    def recurrent(self):
        return True
    @property
    def distribution(self):
        return self.dist
    @property
    def state_info_specs(self):
        # When the previous action is part of the input, samplers must record it.
        if self.state_include_action:
            return [
                ("prev_action", (self.action_dim,)),
            ]
        else:
            return []
| 37.302439
| 105
| 0.598928
|
7b8559c815140b9b7b55c4d4400eac932a3e981e
| 80
|
py
|
Python
|
thesis_analysis/thesis_analysis.py
|
egpbos/thesis_analysis
|
01d0dfed0d69941526edc0a20aeaa4cd85fb81c4
|
[
"MIT"
] | null | null | null |
thesis_analysis/thesis_analysis.py
|
egpbos/thesis_analysis
|
01d0dfed0d69941526edc0a20aeaa4cd85fb81c4
|
[
"MIT"
] | null | null | null |
thesis_analysis/thesis_analysis.py
|
egpbos/thesis_analysis
|
01d0dfed0d69941526edc0a20aeaa4cd85fb81c4
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# FIXME: put actual code here
def example():
    """Placeholder example function; intentionally does nothing yet."""
    pass
| 11.428571
| 29
| 0.575
|
ce4dee45ddd6436c261a9ebe23c8d317c995875d
| 1,590
|
py
|
Python
|
examples/event_generation/parton_shower_gen.py
|
samcaf/JetMonteCarlo
|
71f50f3bb53a4f68ed927eaeaed5ee258da0dd34
|
[
"MIT"
] | null | null | null |
examples/event_generation/parton_shower_gen.py
|
samcaf/JetMonteCarlo
|
71f50f3bb53a4f68ed927eaeaed5ee258da0dd34
|
[
"MIT"
] | null | null | null |
examples/event_generation/parton_shower_gen.py
|
samcaf/JetMonteCarlo
|
71f50f3bb53a4f68ed927eaeaed5ee258da0dd34
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
# Loading parton shower class and parameters:
from jetmontecarlo.montecarlo.partonshower import *
from examples.params import *
# Run with 1) false fixed coupling, LL observable, then 2) false fixed coupling, MU_NP, MLL
# then 3) false fixed coupling, MU_NP, MLL for each beta
####################################
# Example parton shower usage:
####################################
# Trying to loop over betas for now:
# =====================================
# Initializing the Shower:
# =====================================
# Showers are ordered by an angularity e_beta
# Arguments are:
#    * the accuracy of the coupling;
#    * the cutoff angularity, at which the shower stops;
#    * the value of beta for the angularity e_beta which orders the shower;
#    * the type of parton initiating the parton shower.
shower = parton_shower(fixed_coupling=FIXED_COUPLING,
                       shower_cutoff=SHOWER_CUTOFF,
                       shower_beta=SHOWER_BETA,
                       jet_type=JET_TYPE)
# =====================================
# Generating or Loading Events:
# =====================================
# Generate fresh events and persist them; to reuse previously generated
# events instead, comment out the two calls below and uncomment load_events.
shower.gen_events(NUM_SHOWER_EVENTS)
shower.save_events()
#shower.load_events(NUM_SHOWER_EVENTS)
for beta in BETAS:
    # =====================================
    # Saving Jet Observables:
    # =====================================
    # Save two-point correlations at three soft-emission fractions f_soft.
    shower.save_correlations(beta, OBS_ACC, f_soft=1)
    shower.save_correlations(beta, OBS_ACC, f_soft=.75)
    shower.save_correlations(beta, OBS_ACC, f_soft=.5)
print()
| 91
| 0.576101
|
94490c146ae16b22b405b9bf9cf79b4337614551
| 537,332
|
py
|
Python
|
DearPyGui/dearpygui/dearpygui.py
|
Treygec/DearPyGui
|
3421291b0ac93f6e4f2a936501d7140feb8f6b2f
|
[
"MIT"
] | null | null | null |
DearPyGui/dearpygui/dearpygui.py
|
Treygec/DearPyGui
|
3421291b0ac93f6e4f2a936501d7140feb8f6b2f
|
[
"MIT"
] | null | null | null |
DearPyGui/dearpygui/dearpygui.py
|
Treygec/DearPyGui
|
3421291b0ac93f6e4f2a936501d7140feb8f6b2f
|
[
"MIT"
] | null | null | null |
##########################################################
# Dear PyGui User Interface
# ~ Version: master
#
# Notes:
# * This file is automatically generated.
#
# Resources:
# * FAQ: https://github.com/hoffstadt/DearPyGui/discussions/categories/frequently-asked-questions-faq
# * Homepage: https://github.com/hoffstadt/DearPyGui
# * Wiki: https://github.com/hoffstadt/DearPyGui/wiki
# * Issues: https://github.com/hoffstadt/DearPyGui/issues
# * Discussions: https://github.com/hoffstadt/DearPyGui/discussions
##########################################################
from typing import List, Any, Callable, Union, Tuple
from contextlib import contextmanager
import warnings
import functools
import inspect
import dearpygui._dearpygui as internal_dpg
from dearpygui._dearpygui import mvBuffer
from dearpygui._dearpygui import mvVec4
from dearpygui._dearpygui import mvMat4
########################################################################################################################
# User API Index
#
# * Sections
# - Helper Commands
# - Tool Commands
# - Information Commands
# - Configuration Getter Commands
# - Configuration Setter Commands
# - State Commands
# - Viewport Setter Commands
# - Viewport Getter Commands
# - Deprecated Commands
# - Container Context Managers
# - Public _dearpygui Wrappings
# - Constants
#
########################################################################################################################
########################################################################################################################
# Helper Commands
########################################################################################################################
def run_callbacks(jobs):
    """ New in 1.2. Runs callbacks from the callback queue and checks arguments. """
    if jobs is None:
        return
    for job in jobs:
        callback = job[0]
        if callback is None:
            continue
        # Pass exactly as many queued arguments as the callback declares.
        param_count = len(inspect.signature(callback).parameters)
        args = [job[i + 1] for i in range(param_count)]
        callback(*args)
def get_major_version():
    """ Return Dear PyGui's major version number (int). """
    return internal_dpg.get_app_configuration()["major_version"]

def get_minor_version():
    """ Return Dear PyGui's minor version number (int). """
    return internal_dpg.get_app_configuration()["minor_version"]

def get_dearpygui_version():
    """ Return the Dear PyGui version string. """
    return internal_dpg.get_app_configuration()["version"]

def configure_item(item : Union[int, str], **kwargs) -> None:
    """Configures an item after creation."""
    internal_dpg.configure_item(item, **kwargs)

def configure_app(**kwargs) -> None:
    """Configures the application."""
    internal_dpg.configure_app(**kwargs)

def configure_viewport(item : Union[int, str], **kwargs) -> None:
    """Configures a viewport after creation."""
    internal_dpg.configure_viewport(item, **kwargs)
def start_dearpygui():
    """Prepares viewport (if not done already). sets up, cleans up, and runs main event loop.

    Raises:
        RuntimeError: If the viewport has not been created and shown.

    Returns:
        None
    """
    if not internal_dpg.is_viewport_ok():
        # The render loop cannot run without a live viewport.
        # (Removed an unreachable `return` that followed this raise.)
        raise RuntimeError("Viewport was not created and shown.")

    while(internal_dpg.is_dearpygui_running()):
        internal_dpg.render_dearpygui_frame()
@contextmanager
def mutex():
    """ Handles locking/unlocking render thread mutex. """
    try:
        # Lock is acquired on entry; the yielded value is lock_mutex()'s result.
        yield internal_dpg.lock_mutex()
    finally:
        # Always release, even if the `with` body raised.
        internal_dpg.unlock_mutex()
@contextmanager
def popup(parent: Union[int, str], mousebutton: int = internal_dpg.mvMouseButton_Right, modal: bool=False, tag:Union[int, str]=0, min_size:Union[List[int], Tuple[int, ...]]=[100,100], max_size: Union[List[int], Tuple[int, ...]] =[30000, 30000], no_move: bool=False, no_background: bool=False) -> int:
    """A window that will be displayed when a parent item is hovered and the corresponding mouse button has been clicked. By default a popup will shrink fit the items it contains.
    This is useful for context windows, and simple modal window popups.
    When popups are used a modal they have more available settings (i.e. title, resize, width, height) These
    can be set by using configure item.
    This is a light wrapper over window. For more control over a modal|popup window use a normal window with the modal|popup keyword
    and set the item handler and mouse events manually.

    Args:
        parent: The UI item that will need to be hovered.
        **mousebutton: The mouse button that will trigger the window to popup.
        **modal: Will force the user to interact with the popup.
        **tag: Explicit id/alias for the popup window; 0 generates a new uuid.
        **min_size: New in 1.4. Minimum window size.
        **max_size: New in 1.4. Maximum window size.
        **no_move: New in 1.4. Prevents the window from moving based on user input.
        **no_background: New in 1.4. Sets Background and border alpha to transparent.

    Returns:
        item's uuid
    """
    try:
        if tag == 0:
            _internal_popup_id = internal_dpg.generate_uuid()
        else:
            _internal_popup_id = tag
        # Wire a click handler on the parent that shows the popup window.
        _handler_reg_id = internal_dpg.add_item_handler_registry()
        internal_dpg.add_item_clicked_handler(mousebutton, parent=internal_dpg.last_item(), callback=lambda: internal_dpg.configure_item(_internal_popup_id, show=True))
        internal_dpg.bind_item_handler_registry(parent, _handler_reg_id)
        # The window starts hidden; the handler above reveals it on click.
        if modal:
            internal_dpg.add_window(modal=True, show=False, tag=_internal_popup_id, autosize=True, min_size=min_size, max_size=max_size, no_move=no_move, no_background=no_background)
        else:
            internal_dpg.add_window(popup=True, show=False, tag=_internal_popup_id, autosize=True, min_size=min_size, max_size=max_size, no_move=no_move, no_background=no_background)
        # Push so items created in the `with` body become children of the popup.
        internal_dpg.push_container_stack(internal_dpg.last_container())
        yield _internal_popup_id
    finally:
        internal_dpg.pop_container_stack()
########################################################################################################################
# Tool Commands
########################################################################################################################
def show_style_editor() -> None:
    """Shows the standard style editor window

    Returns:
        None
    """
    internal_dpg.show_tool(internal_dpg.mvTool_Style)

def show_metrics() -> None:
    """Shows the standard metrics window

    Returns:
        None
    """
    internal_dpg.show_tool(internal_dpg.mvTool_Metrics)

def show_about() -> None:
    """Shows the standard about window

    Returns:
        None
    """
    internal_dpg.show_tool(internal_dpg.mvTool_About)

def show_debug() -> None:
    """Shows the standard debug window

    Returns:
        None
    """
    internal_dpg.show_tool(internal_dpg.mvTool_Debug)

def show_documentation() -> None:
    """Shows the standard documentation window

    Returns:
        None
    """
    internal_dpg.show_tool(internal_dpg.mvTool_Doc)

def show_font_manager() -> None:
    """Shows the standard font manager window

    Returns:
        None
    """
    internal_dpg.show_tool(internal_dpg.mvTool_Font)

def show_item_registry() -> None:
    """Shows the standard item registry window

    Returns:
        None
    """
    internal_dpg.show_tool(internal_dpg.mvTool_ItemRegistry)
########################################################################################################################
# Information Commands
########################################################################################################################
def get_item_slot(item: Union[int, str]) -> Union[int, None]:
    """Returns an item's target slot.

    Returns:
        slot as a int
    """
    return internal_dpg.get_item_info(item)["target"]

def is_item_container(item: Union[int, str]) -> Union[bool, None]:
    """Checks if item is a container.

    Returns:
        status as a bool
    """
    return internal_dpg.get_item_info(item)["container"]

def get_item_parent(item: Union[int, str]) -> Union[int, None]:
    """Gets the item's parent.

    Returns:
        parent as a int or None
    """
    return internal_dpg.get_item_info(item)["parent"]

def get_item_children(item: Union[int, str] , slot: int = -1) -> Union[dict, List[int], None]:
    """Provides access to the item's children slots.

    Args:
        item: Item whose children are queried.
        slot: Slot index in [0, 4] to return a single slot; any other
            value returns all slots.

    Returns:
        A 2-D tuple of children slots ex. ((child_slot_1),(child_slot_2),(child_slot_3),...) or a single slot if slot is used.
    """
    if slot < 0 or slot > 4:
        return internal_dpg.get_item_info(item)["children"]
    return internal_dpg.get_item_info(item)["children"][slot]

def get_item_type(item: Union[int, str]) -> str:
    """Gets the item's type.

    Returns:
        type as a string or None
    """
    return internal_dpg.get_item_info(item)["type"]

def get_item_theme(item: Union[int, str]) -> int:
    """Gets the item's theme.

    Returns:
        theme's uuid
    """
    return internal_dpg.get_item_info(item)["theme"]

def get_item_font(item: Union[int, str]) -> int:
    """Gets the item's font.

    Returns:
        font's uuid
    """
    return internal_dpg.get_item_info(item)["font"]

def get_item_disabled_theme(item: Union[int, str]) -> int:
    """Gets the item's disabled theme.

    Returns:
        theme's uuid
    """
    return internal_dpg.get_item_info(item)["disabled_theme"]
########################################################################################################################
# Configuration Setter Commands
########################################################################################################################
def enable_item(item: Union[int, str]):
    """Enables the item.

    Args:
        **item: Item to enable.

    Returns:
        None
    """
    internal_dpg.configure_item(item, enabled=True)

def disable_item(item: Union[int, str]):
    """Disables the item.

    Args:
        **item: Item to disable.

    Returns:
        None
    """
    internal_dpg.configure_item(item, enabled=False)

def set_item_label(item: Union[int, str], label: str):
    """Sets the item's displayed label, anything after the characters "##" in the name will not be shown.

    Args:
        item: Item label will be applied to.
        label: Displayed name to be applied.

    Returns:
        None
    """
    internal_dpg.configure_item(item, label=label)

def set_item_source(item: Union[int, str], source: Union[int, str]):
    """Sets the item's value, to the source's value. Widget's value will now be "linked" to source's value.

    Args:
        item: Item to me linked.
        source: Source to link to.

    Returns:
        None
    """
    internal_dpg.configure_item(item, source=source)

def set_item_pos(item: Union[int, str], pos: List[float]):
    """Sets the item's position.

    Args:
        item: Item the absolute position will be applied to.
        pos: X and Y positions relative to parent of the item.

    Returns:
        None
    """
    internal_dpg.configure_item(item, pos=pos)

def set_item_width(item: Union[int, str], width: int):
    """Sets the item's width.

    Args:
        item: Item the Width will be applied to.
        width: Width to be applied.

    Returns:
        None
    """
    internal_dpg.configure_item(item, width=width)

def set_item_height(item: Union[int, str], height: int):
    """Sets the item's height.

    Args:
        item: Item the Height will be applied to.
        height: Height to be applied.

    Returns:
        None
    """
    internal_dpg.configure_item(item, height=height)

def set_item_indent(item: Union[int, str], indent: int):
    """Sets the item's indent.

    Args:
        item: Item the indent will be applied to.
        indent: Indent to be applied.

    Returns:
        None
    """
    internal_dpg.configure_item(item, indent=indent)

def set_item_track_offset(item: Union[int, str], offset: float):
    """Sets the item's track offset.

    Args:
        item: Item the track offset will be applied to.
        offset: Track offset to be applied (0.0 top, 0.5 center, 1.0 bottom).

    Returns:
        None
    """
    internal_dpg.configure_item(item, track_offset=offset)
def set_item_payload_type(item: Union[int, str], payload_type: str):
    """Sets the item's drag & drop payload type.

    Args:
        item: Item the payload type will be applied to.
        payload_type: Payload type to be applied.

    Returns:
        None
    """
    # Bug fix: previously passed the builtin `str` instead of the
    # `payload_type` argument, so the caller's value was silently ignored.
    internal_dpg.configure_item(item, payload_type=payload_type)
def set_item_callback(item: Union[int, str], callback: Callable):
    """Sets the item's callback.

    Args:
        item: Item the callback will be applied to.
        callback: Callback to be applied.

    Returns:
        None
    """
    internal_dpg.configure_item(item, callback=callback)

def set_item_drag_callback(item: Union[int, str], callback: Callable):
    """Sets the item's drag callback.

    Args:
        item: Item the callback will be applied to.
        callback: Callback to be applied.

    Returns:
        None
    """
    internal_dpg.configure_item(item, drag_callback=callback)

def set_item_drop_callback(item: Union[int, str], callback: Callable):
    """Sets the item's drop callback.

    Args:
        item: Item the callback will be applied to.
        callback: Callback to be applied.

    Returns:
        None
    """
    internal_dpg.configure_item(item, drop_callback=callback)

def track_item(item: Union[int, str]):
    """Track item in scroll region.

    Args:
        item: Item to track.

    Returns:
        None
    """
    internal_dpg.configure_item(item, tracked=True)

def untrack_item(item: Union[int, str]):
    """Stop tracking item in scroll region.

    Args:
        item: Item to stop tracking.

    Returns:
        None
    """
    internal_dpg.configure_item(item, tracked=False)

def set_item_user_data(item: Union[int, str], user_data: Any):
    """Sets the item's user_data to any python object.

    Args:
        item: Item the user data will be applied to.
        user_data: User data to be applied.

    Returns:
        None
    """
    internal_dpg.configure_item(item, user_data=user_data)

def show_item(item: Union[int, str]):
    """Shows the item.

    Args:
        item: Item to show.

    Returns:
        None
    """
    internal_dpg.configure_item(item, show=True)

def hide_item(item: Union[int, str], *, children_only: bool = False):
    """Hides the item.

    Args:
        **item: Item to hide.
        **children_only: If True, hide the item's children instead of the
            item itself.

    Returns:
        None
    """
    if children_only:
        children = get_item_children(item)
        for child in children:
            internal_dpg.configure_item(child, show=False)
    else:
        internal_dpg.configure_item(item, show=False)
########################################################################################################################
# Configuration Getter Commands
########################################################################################################################
def get_item_label(item: Union[int, str]) -> Union[str, None]:
    """Gets the item's label.

    Returns:
        label as a string or None
    """
    return internal_dpg.get_item_configuration(item)["label"]

def get_item_filter_key(item: Union[int, str]) -> Union[str, None]:
    """Gets the item's filter key.

    Returns:
        filter key as a string or None
    """
    return internal_dpg.get_item_configuration(item)["filter_key"]

def is_item_tracked(item: Union[int, str]) -> Union[bool, None]:
    """Checks if item is tracked.

    Returns:
        tracked as a bool or None
    """
    return internal_dpg.get_item_configuration(item)["tracked"]

def is_item_search_delayed(item: Union[int, str]) -> Union[bool, None]:
    """Checks if item is search delayed.

    Returns:
        delay_search as a bool or None
    """
    return internal_dpg.get_item_configuration(item)["delay_search"]

def get_item_indent(item: Union[int, str]) -> Union[int, None]:
    """Gets the item's indent.

    Returns:
        indent as a int or None
    """
    return internal_dpg.get_item_configuration(item)["indent"]

def get_item_track_offset(item: Union[int, str]) -> Union[float, None]:
    """Gets the item's track offset.

    Returns:
        track offset as a float or None
    """
    return internal_dpg.get_item_configuration(item)["track_offset"]

def get_item_width(item: Union[int, str]) -> Union[int, None]:
    """Gets the item's width.

    Returns:
        width as a int or None
    """
    return internal_dpg.get_item_configuration(item)["width"]

def get_item_height(item: Union[int, str]) -> Union[int, None]:
    """Gets the item's height.

    Returns:
        height as a int or None
    """
    return internal_dpg.get_item_configuration(item)["height"]

def get_item_callback(item: Union[int, str]) -> Union[Callable, None]:
    """Gets the item's callback.

    Returns:
        callback as a callable or None
    """
    return internal_dpg.get_item_configuration(item)["callback"]

def get_item_drag_callback(item: Union[int, str]) -> Union[Callable, None]:
    """Gets the item's drag callback.

    Returns:
        callback as a callable or None
    """
    return internal_dpg.get_item_configuration(item)["drag_callback"]

def get_item_drop_callback(item: Union[int, str]) -> Union[Callable, None]:
    """Gets the item's drop callback.

    Returns:
        callback as a callable or None
    """
    return internal_dpg.get_item_configuration(item)["drop_callback"]

def get_item_user_data(item: Union[int, str]) -> Union[Any, None]:
    """Gets the item's user data.

    Returns:
        user data as a python object or None
    """
    return internal_dpg.get_item_configuration(item)["user_data"]

def get_item_source(item: Union[int, str]) -> Union[str, None]:
    """Gets the item's source.

    Returns:
        source as a string or None
    """
    return internal_dpg.get_item_configuration(item)["source"]
########################################################################################################################
# State Commands
########################################################################################################################
def is_item_hovered(item: Union[int, str]) -> Union[bool, None]:
    """Checks if item is hovered.
    Returns:
        status as a bool
    """
    return internal_dpg.get_item_state(item)["hovered"]
def is_item_active(item: Union[int, str]) -> Union[bool, None]:
    """Checks if item is active.
    Returns:
        status as a bool
    """
    return internal_dpg.get_item_state(item)["active"]
def is_item_focused(item: Union[int, str]) -> Union[bool, None]:
    """Checks if item is focused.
    Returns:
        status as a bool
    """
    return internal_dpg.get_item_state(item)["focused"]
def is_item_clicked(item: Union[int, str]) -> Union[bool, None]:
    """Checks if item is clicked.
    Returns:
        status as a bool
    """
    return internal_dpg.get_item_state(item)["clicked"]
def is_item_left_clicked(item: Union[int, str]) -> Union[bool, None]:
    """Checks if item is left clicked.
    Returns:
        status as a bool
    """
    return internal_dpg.get_item_state(item)["left_clicked"]
def is_item_right_clicked(item: Union[int, str]) -> Union[bool, None]:
    """Checks if item is right clicked.
    Returns:
        status as a bool
    """
    return internal_dpg.get_item_state(item)["right_clicked"]
def is_item_middle_clicked(item: Union[int, str]) -> Union[bool, None]:
    """Checks if item is middle clicked.
    Returns:
        status as a bool
    """
    return internal_dpg.get_item_state(item)["middle_clicked"]
def is_item_visible(item: Union[int, str]) -> Union[bool, None]:
    """Checks if item is visible.
    Returns:
        status as a bool
    """
    return internal_dpg.get_item_state(item)["visible"]
def is_item_edited(item: Union[int, str]) -> Union[bool, None]:
    """Checks if item is edited.
    Returns:
        status as a bool
    """
    return internal_dpg.get_item_state(item)["edited"]
def is_item_activated(item: Union[int, str]) -> Union[bool, None]:
    """Checks if item is activated.
    Returns:
        status as a bool
    """
    return internal_dpg.get_item_state(item)["activated"]
def is_item_deactivated(item: Union[int, str]) -> Union[bool, None]:
    """Checks if item is deactivated.
    Returns:
        status as a bool
    """
    return internal_dpg.get_item_state(item)["deactivated"]
def is_item_deactivated_after_edit(item: Union[int, str]) -> Union[bool, None]:
    """Checks if item is deactivated_after_edit.
    Returns:
        status as a bool
    """
    return internal_dpg.get_item_state(item)["deactivated_after_edit"]
def is_item_toggled_open(item: Union[int, str]) -> Union[bool, None]:
    """Checks if item is toggled_open.
    Returns:
        status as a bool
    """
    return internal_dpg.get_item_state(item)["toggled_open"]
def is_item_ok(item: Union[int, str]) -> Union[bool, None]:
    """Checks if item is ok and can be used.
    Returns:
        status as a bool
    """
    return internal_dpg.get_item_state(item)["ok"]
def is_item_shown(item: Union[int, str]) -> Union[bool, None]:
    """Checks if item is shown.
    Returns:
        status as a bool
    """
    return internal_dpg.get_item_configuration(item)["show"]
def is_item_enabled(item: Union[int, str]) -> Union[bool, None]:
    """Checks if item is enabled.
    Returns:
        status as a bool
    """
    return internal_dpg.get_item_configuration(item)["enabled"]
def get_item_pos(item: Union[int, str]) -> List[int]:
    """Returns item's position.
    Returns:
        position
    """
    return internal_dpg.get_item_state(item)["pos"]
def get_available_content_region(item: Union[int, str]) -> List[int]:
    """Returns item's available content region.
    Returns:
        available content region
    """
    return internal_dpg.get_item_state(item)["content_region_avail"]
def get_item_rect_size(item: Union[int, str]) -> List[int]:
    """Returns item's rect size.
    Returns:
        rect size
    """
    return internal_dpg.get_item_state(item)["rect_size"]
def get_item_rect_min(item: Union[int, str]) -> List[int]:
    """Returns item's rect min.
    Returns:
        position
    """
    return internal_dpg.get_item_state(item)["rect_min"]
def get_item_rect_max(item: Union[int, str]) -> List[int]:
    """Returns item's rect max.
    Returns:
        position
    """
    return internal_dpg.get_item_state(item)["rect_max"]
########################################################################################################################
# Viewport Setter Commands
########################################################################################################################
def set_viewport_clear_color(color: List[int]):
    """Sets the viewport's clear color.
    Returns:
        None
    """
    internal_dpg.configure_viewport(0, clear_color=color)
def set_viewport_small_icon(icon: str):
    """Sets the viewport's small icon. Must be ico for windows.
    Returns:
        None
    """
    internal_dpg.configure_viewport(0, small_icon=icon)
def set_viewport_large_icon(icon: str):
    """Sets the viewport's large icon. Must be ico for windows.
    Returns:
        None
    """
    internal_dpg.configure_viewport(0, large_icon=icon)
def set_viewport_pos(pos: List[float]):
    """Sets the viewport's position.
    Returns:
        None
    """
    internal_dpg.configure_viewport(0, x_pos=pos[0], y_pos=pos[1])
def set_viewport_width(width: int):
    """Sets the viewport's width.
    Returns:
        None
    """
    internal_dpg.configure_viewport(0, width=width)
def set_viewport_height(height: int):
    """Sets the viewport's height.
    Returns:
        None
    """
    internal_dpg.configure_viewport(0, height=height)
def set_viewport_min_width(width: int):
    """Sets the viewport's minimum width.
    Returns:
        None
    """
    internal_dpg.configure_viewport(0, min_width=width)
def set_viewport_max_width(width: int):
    """Sets the viewport's max width.
    Returns:
        None
    """
    internal_dpg.configure_viewport(0, max_width=width)
def set_viewport_min_height(height: int):
    """Sets the viewport's minimum height.
    Returns:
        None
    """
    internal_dpg.configure_viewport(0, min_height=height)
def set_viewport_max_height(height: int):
    """Sets the viewport's max height.
    Returns:
        None
    """
    internal_dpg.configure_viewport(0, max_height=height)
def set_viewport_title(title: str):
    """Sets the viewport's title.
    Returns:
        None
    """
    internal_dpg.configure_viewport(0, title=title)
def set_viewport_always_top(value: bool):
    """Sets the viewport always on top.
    Returns:
        None
    """
    internal_dpg.configure_viewport(0, always_on_top=value)
def set_viewport_resizable(value: bool):
    """Sets the viewport resizable.
    Returns:
        None
    """
    internal_dpg.configure_viewport(0, resizable=value)
def set_viewport_vsync(value: bool):
    """Sets the viewport vsync.
    Returns:
        None
    """
    internal_dpg.configure_viewport(0, vsync=value)
def set_viewport_decorated(value: bool):
    """Sets the viewport to be decorated.
    Returns:
        None
    """
    internal_dpg.configure_viewport(0, decorated=value)
########################################################################################################################
# Viewport Getter Commands
########################################################################################################################
def get_viewport_clear_color() ->List[int]:
    """Gets the viewport's clear color.
    Returns:
        List[int]
    """
    return internal_dpg.get_viewport_configuration()["clear_color"]
def get_viewport_pos() ->List[float]:
    """Gets the viewport's position.
    Returns:
        viewport position.
    """
    config = internal_dpg.get_viewport_configuration()
    x_pos = config["x_pos"]
    y_pos = config["y_pos"]
    return [x_pos, y_pos]
def get_viewport_width() -> int:
    """Gets the viewport's width.
    Returns:
        viewport width
    """
    return internal_dpg.get_viewport_configuration()["width"]
def get_viewport_client_width() -> int:
    """Gets the viewport's client width.
    Returns:
        viewport client width
    """
    return internal_dpg.get_viewport_configuration()["client_width"]
def get_viewport_client_height() -> int:
    """Gets the viewport's client height.
    Returns:
        viewport client height
    """
    return internal_dpg.get_viewport_configuration()["client_height"]
def get_viewport_height() -> int:
    """Gets the viewport's height.
    Returns:
        int
    """
    return internal_dpg.get_viewport_configuration()["height"]
def get_viewport_min_width() -> int:
    """Gets the viewport's minimum width.
    Returns:
        int
    """
    return internal_dpg.get_viewport_configuration()["min_width"]
def get_viewport_max_width() -> int:
    """Gets the viewport's max width.
    Returns:
        int
    """
    return internal_dpg.get_viewport_configuration()["max_width"]
def get_viewport_min_height() -> int:
    """Gets the viewport's minimum height.
    Returns:
        int
    """
    return internal_dpg.get_viewport_configuration()["min_height"]
def get_viewport_max_height() -> int:
    """Gets the viewport's max height.
    Returns:
        int
    """
    return internal_dpg.get_viewport_configuration()["max_height"]
def get_viewport_title() -> str:
    """Gets the viewport's title.
    Returns:
        str
    """
    return internal_dpg.get_viewport_configuration()["title"]
def is_viewport_always_top() -> bool:
    """Checks the viewport always on top flag.
    Returns:
        bool
    """
    return internal_dpg.get_viewport_configuration()["always_on_top"]
def is_viewport_resizable() -> bool:
    """Checks the viewport resizable flag.
    Returns:
        bool
    """
    return internal_dpg.get_viewport_configuration()["resizable"]
def is_viewport_vsync_on() -> bool:
    """Checks the viewport vsync flag.
    Returns:
        bool
    """
    return internal_dpg.get_viewport_configuration()["vsync"]
def is_viewport_decorated() -> bool:
    """Checks if the viewport is decorated.
    Returns:
        bool
    """
    return internal_dpg.get_viewport_configuration()["decorated"]
##########################################################
# Deprecated Commands
##########################################################
def deprecated(reason):
    """Decorator marking a function as deprecated.

    Usable two ways: with a message, ``@deprecated("use X instead")``, or
    bare, ``@deprecated``.  Every call to the wrapped function emits a
    DeprecationWarning (forced visible for the duration of the warn call)
    and then delegates to the original function.
    """
    if isinstance(reason, (bytes, str)):
        # Called as @deprecated("reason"): build and return the decorator.
        message = "Call to deprecated function {name} ({reason})."
        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                warnings.simplefilter('always', DeprecationWarning)
                warnings.warn(
                    message.format(name=func.__name__, reason=reason),
                    category=DeprecationWarning,
                    stacklevel=2
                )
                warnings.simplefilter('default', DeprecationWarning)
                return func(*args, **kwargs)
            return wrapper
        return decorator
    elif inspect.isfunction(reason):
        # Called bare as @deprecated: 'reason' is the function itself.
        target = reason
        @functools.wraps(target)
        def wrapper(*args, **kwargs):
            warnings.simplefilter('always', DeprecationWarning)
            warnings.warn(
                "Call to deprecated function {name}.".format(name=target.__name__),
                category=DeprecationWarning,
                stacklevel=2
            )
            warnings.simplefilter('default', DeprecationWarning)
            return target(*args, **kwargs)
        return wrapper
# ---------------------------------------------------------------------------
# Thin deprecated shims: each forwards to its replacement and emits a
# DeprecationWarning (via the @deprecated decorator) naming the new API.
# ---------------------------------------------------------------------------
@deprecated("Use 'configure_app(docking=True, docking_space=dock_space)'.")
def enable_docking(dock_space=False):
    """ deprecated function """
    internal_dpg.configure_app(docking=True, docking_space=dock_space)
@deprecated("Use 'configure_app(init_file=file)'.")
def set_init_file(file="dpg.ini"):
    """ deprecated function """
    internal_dpg.configure_app(init_file=file)
@deprecated("Use 'configure_app(init_file=file, load_init_file=True)'.")
def load_init_file(file):
    """ deprecated function """
    internal_dpg.configure_app(init_file=file, load_init_file=True)
@deprecated("Use: `is_viewport_ok(...)`")
def is_viewport_created():
    """ deprecated function """
    return internal_dpg.is_viewport_ok()
@deprecated("Use: \ncreate_viewport()\nsetup_dearpygui()\nshow_viewport()")
def setup_viewport():
    """ deprecated function """
    internal_dpg.create_viewport()
    internal_dpg.setup_dearpygui()
    internal_dpg.show_viewport()
@deprecated("Use: `bind_item_theme(...)`")
def set_item_theme(item, theme):
    """ deprecated function """
    return internal_dpg.bind_item_theme(item, theme)
@deprecated("Use: `bind_item_type_disabled_theme(...)`")
def set_item_type_disabled_theme(item, theme):
    """ deprecated function """
    return internal_dpg.bind_item_type_disabled_theme(item, theme)
@deprecated("Use: `bind_item_type_theme(...)`")
def set_item_type_theme(item, theme):
    """ deprecated function """
    return internal_dpg.bind_item_type_theme(item, theme)
@deprecated("Use: `bind_item_font(...)`")
def set_item_font(item, font):
    """ deprecated function """
    return internal_dpg.bind_item_font(item, font)
@deprecated("Use: `add_item_activated_handler(...)`")
def add_activated_handler(parent, **kwargs):
    """ deprecated function """
    return internal_dpg.add_item_activated_handler(parent, **kwargs)
@deprecated("Use: `add_item_active_handler(...)`")
def add_active_handler(parent, **kwargs):
    """ deprecated function """
    return internal_dpg.add_item_active_handler(parent, **kwargs)
@deprecated("Use: `add_item_clicked_handler(...)`")
def add_clicked_handler(parent, button=-1, **kwargs):
    """ deprecated function """
    return internal_dpg.add_item_clicked_handler(parent, button, **kwargs)
@deprecated("Use: `add_item_deactived_after_edit_handler(...)`")
def add_deactivated_after_edit_handler(parent, **kwargs):
    """ deprecated function """
    return internal_dpg.add_item_deactivated_after_edit_handler(parent, **kwargs)
@deprecated("Use: `add_item_deactivated_handler(...)`")
def add_deactivated_handler(parent, **kwargs):
    """ deprecated function """
    return internal_dpg.add_item_deactivated_handler(parent, **kwargs)
@deprecated("Use: `add_item_edited_handler(...)`")
def add_edited_handler(parent, **kwargs):
    """ deprecated function """
    return internal_dpg.add_item_edited_handler(parent, **kwargs)
@deprecated("Use: `add_item_focus_handler(...)`")
def add_focus_handler(parent, **kwargs):
    """ deprecated function """
    return internal_dpg.add_item_focus_handler(parent, **kwargs)
@deprecated("Use: `add_item_hover_handler(...)`")
def add_hover_handler(parent, **kwargs):
    """ deprecated function """
    return internal_dpg.add_item_hover_handler(parent, **kwargs)
@deprecated("Use: `add_item_resize_handler(...)`")
def add_resize_handler(parent, **kwargs):
    """ deprecated function """
    return internal_dpg.add_item_resize_handler(parent, **kwargs)
@deprecated("Use: `add_item_toggled_open_handler(...)`")
def add_toggled_open_handler(parent, **kwargs):
    """ deprecated function """
    return internal_dpg.add_item_toggled_open_handler(parent, **kwargs)
@deprecated("Use: `add_item_visible_handler(...)`")
def add_visible_handler(parent, **kwargs):
    """ deprecated function """
    return internal_dpg.add_item_visible_handler(parent, **kwargs)
@deprecated("Use: `bind_colormap(...)`")
def set_colormap(item, source):
    """ deprecated function """
    return internal_dpg.bind_colormap(item, source)
@deprecated("Use: `bind_theme(0)`")
def reset_default_theme(item, source):
    """ deprecated function """
    # NOTE(review): the deprecation message suggests `bind_theme(0)`, but the
    # parameters are forwarded as-is — confirm the intended call signature.
    return internal_dpg.bind_theme(item, source)
@deprecated
def set_staging_mode(mode):
    """ deprecated function """
    # Intentional no-op: kept only so old call sites keep working.
    pass
@deprecated
def add_table_next_column(**kwargs):
    """ deprecated function """
    # Intentional no-op: kept only so old call sites keep working.
    pass
@deprecated("Use: add_stage")
def add_staging_container(**kwargs):
    """ deprecated function """
    return internal_dpg.add_stage(**kwargs)
@deprecated("Use: stage")
@contextmanager
def staging_container(**kwargs):
    """
    deprecated function — use `stage` instead.
    Args:
        **label (str): Overrides 'name' as label.
        **user_data (Any): User data for callbacks.
        **use_internal_label (bool): Use generated internal label instead of user specified (appends ### uuid).
        **id (Union[int, str]): Unique id used to programmatically refer to the item.If label is unused this will be the label.
    Yields:
        Union[int, str]
    """
    try:
        warnings.warn("'staging_container' is deprecated and was changed to 'stage'", DeprecationWarning, 2)
        # Fixed for consistency with add_staging_container above: the internal
        # API function is add_stage (add_stage_container does not exist).
        widget = internal_dpg.add_stage(**kwargs)
        internal_dpg.push_container_stack(widget)
        yield widget
    finally:
        internal_dpg.pop_container_stack()
@deprecated("Use: add_spacer(...)")
def add_spacing(**kwargs):
    """ (deprecated function) Adds vertical spacing.

    Accepts the same keyword arguments as `add_spacer`, plus:
        count (int, optional): Number of spacings to add; each spacing's
            size depends on the current style.  When `count` is given the
            spacers are wrapped in a group and the group's id is returned.
    Returns:
        Union[int, str]
    """
    _MISSING = object()
    count = kwargs.pop("count", _MISSING)
    if count is _MISSING:
        # No count requested: a single spacer is enough.
        return internal_dpg.add_spacer(**kwargs)
    # Wrap `count` spacers in a group so they act as one unit.
    internal_dpg.add_group(**kwargs)
    internal_dpg.push_container_stack(internal_dpg.last_container())
    for _ in range(count):
        internal_dpg.add_spacer()
    return internal_dpg.pop_container_stack()
@deprecated("Use: add_spacer(...)")
def add_dummy(**kwargs):
    """ (deprecated function) Adds a spacer or 'dummy' object.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks.
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int]], optional): Places the item relative to window coordinates, [0,0] is top left.
    Returns:
        Union[int, str]
    """
    # Straight pass-through to the replacement API.
    return internal_dpg.add_spacer(**kwargs)
@deprecated("Use: `destroy_context()`")
def cleanup_dearpygui():
    """ deprecated function """
    return internal_dpg.destroy_context()
@deprecated("Use: group(horizontal=True)")
def add_same_line(**kwargs):
    """ deprecated function """
    # Emulate the old behavior: move the previously-created item into a new
    # horizontal group, then arrange for the *next* created item to be
    # re-parented into that same group.
    last_item = internal_dpg.last_item()
    group = internal_dpg.add_group(horizontal=True, **kwargs)
    internal_dpg.move_item(last_item, parent=group)
    # `group` is captured by the lambda so the next item lands beside the last.
    internal_dpg.capture_next_item(lambda s: internal_dpg.move_item(s, parent=group))
    return group
@deprecated("Use: `add_child_window()`")
def add_child(**kwargs):
    """ (deprecated function) Adds an embedded child window. Will show scrollbars when items do not fit.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        border (bool, optional): Shows/Hides the border around the sides.
        autosize_x (bool, optional): Autosize the window to its parents size in x.
        autosize_y (bool, optional): Autosize the window to its parents size in y.
        no_scrollbar (bool, optional): Disable scrollbars (window can still scroll with mouse or programmatically).
        horizontal_scrollbar (bool, optional): Allow horizontal scrollbar to appear (off by default).
        menubar (bool, optional): Shows/Hides the menubar at the top.
    Returns:
        Union[int, str]
    """
    # Straight pass-through to the renamed API.
    return internal_dpg.add_child_window(**kwargs)
@deprecated("Use: `child_window()`")
@contextmanager
def child(**kwargs):
    """ (deprecated function) Adds an embedded child window. Will show scrollbars when items do not fit.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        border (bool, optional): Shows/Hides the border around the sides.
        autosize_x (bool, optional): Autosize the window to its parents size in x.
        autosize_y (bool, optional): Autosize the window to its parents size in y.
        no_scrollbar (bool, optional): Disable scrollbars (window can still scroll with mouse or programmatically).
        horizontal_scrollbar (bool, optional): Allow horizontal scrollbar to appear (off by default).
        menubar (bool, optional): Shows/Hides the menubar at the top.
    Yields:
        Union[int, str]
    """
    try:
        widget = internal_dpg.add_child_window(**kwargs)
        # Items created inside the `with` body become children of this window;
        # the finally guarantees the container stack is balanced on exceptions.
        internal_dpg.push_container_stack(widget)
        yield widget
    finally:
        internal_dpg.pop_container_stack()
@deprecated("Use: Just not recommended")
def setup_registries() -> None:
    """Adds default registries for fonts, handlers, textures, colormaps, and values."""
    # Each registry is created under a reserved UUID so other deprecated
    # helpers can find it without the caller holding a reference.
    internal_dpg.add_font_registry(tag=internal_dpg.mvReservedUUID_0)
    internal_dpg.add_handler_registry(tag=internal_dpg.mvReservedUUID_1)
    internal_dpg.add_texture_registry(tag=internal_dpg.mvReservedUUID_2)
    internal_dpg.add_value_registry(tag=internal_dpg.mvReservedUUID_3)
    internal_dpg.add_colormap_registry(tag=internal_dpg.mvReservedUUID_4)
@deprecated("Use: `set_frame_callback()`")
def set_start_callback(callback):
    """ deprecated function """
    # NOTE(review): frame 3 apparently matches the old "start" callback
    # timing — confirm against set_frame_callback semantics.
    return internal_dpg.set_frame_callback(3, callback)
##########################################################
# Container Context Managers
##########################################################
@contextmanager
def child_window(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, border: bool =True, autosize_x: bool =False, autosize_y: bool =False, no_scrollbar: bool =False, horizontal_scrollbar: bool =False, menubar: bool =False, **kwargs) -> Union[int, str]:
    """ Adds an embedded child window. Will show scrollbars when items do not fit.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        border (bool, optional): Shows/Hides the border around the sides.
        autosize_x (bool, optional): Autosize the window to its parents size in x.
        autosize_y (bool, optional): Autosize the window to its parents size in y.
        no_scrollbar (bool, optional): Disable scrollbars (window can still scroll with mouse or programmatically).
        horizontal_scrollbar (bool, optional): Allow horizontal scrollbar to appear (off by default).
        menubar (bool, optional): Shows/Hides the menubar at the top.
        id (Union[int, str], optional): (deprecated)
    Yields:
        Union[int, str]
    """
    try:
        if 'id' in kwargs.keys():
            warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
            # Pop the deprecated key so it is not forwarded to
            # add_child_window alongside the replacement 'tag'.
            tag=kwargs.pop('id')
        widget = internal_dpg.add_child_window(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, payload_type=payload_type, drop_callback=drop_callback, show=show, pos=pos, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, border=border, autosize_x=autosize_x, autosize_y=autosize_y, no_scrollbar=no_scrollbar, horizontal_scrollbar=horizontal_scrollbar, menubar=menubar, **kwargs)
        internal_dpg.push_container_stack(widget)
        yield widget
    finally:
        internal_dpg.pop_container_stack()
@contextmanager
def clipper(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, delay_search: bool =False, **kwargs) -> Union[int, str]:
    """ Helper to manually clip large list of items. Increases performance by not searching or drawing widgets outside of the clipped region.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        id (Union[int, str], optional): (deprecated)
    Yields:
        Union[int, str]
    """
    try:
        if 'id' in kwargs.keys():
            warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
            # Pop the deprecated key so it is not forwarded to add_clipper
            # alongside the replacement 'tag'.
            tag=kwargs.pop('id')
        widget = internal_dpg.add_clipper(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, indent=indent, parent=parent, before=before, show=show, delay_search=delay_search, **kwargs)
        internal_dpg.push_container_stack(widget)
        yield widget
    finally:
        internal_dpg.pop_container_stack()
@contextmanager
def collapsing_header(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, closable: bool =False, default_open: bool =False, open_on_double_click: bool =False, open_on_arrow: bool =False, leaf: bool =False, bullet: bool =False, **kwargs) -> Union[int, str]:
	 """	 Adds a collapsing header to add items to. Must be closed with the end command.

	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
		drag_callback (Callable, optional): Registers a drag callback for drag and drop.
		drop_callback (Callable, optional): Registers a drop callback for drag and drop.
		show (bool, optional): Attempt to render widget.
		pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
		filter_key (str, optional): Used by filter widget.
		delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
		tracked (bool, optional): Scroll tracking
		track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
		closable (bool, optional): Adds the ability to hide this widget by pressing the (x) in the top right of widget.
		default_open (bool, optional): Sets the collapseable header open by default.
		open_on_double_click (bool, optional): Need double-click to open node.
		open_on_arrow (bool, optional): Only open when clicking on the arrow part.
		leaf (bool, optional): No collapsing, no arrow (use as a convenience for leaf nodes).
		bullet (bool, optional): Display a bullet instead of arrow.
		id (Union[int, str], optional): (deprecated)
	Yields:
		Union[int, str]
	 """
	 # Resolve the deprecated 'id' keyword before creating the item.
	 if 'id' in kwargs:
		 warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		 tag = kwargs['id']
	 widget = internal_dpg.add_collapsing_header(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, indent=indent, parent=parent, before=before, payload_type=payload_type, drag_callback=drag_callback, drop_callback=drop_callback, show=show, pos=pos, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, closable=closable, default_open=default_open, open_on_double_click=open_on_double_click, open_on_arrow=open_on_arrow, leaf=leaf, bullet=bullet, **kwargs)
	 internal_dpg.push_container_stack(widget)
	 # Guard only the yield: if creation or the push above raised, the
	 # finally block must not pop the *parent* container off the stack.
	 try:
		 yield widget
	 finally:
		 internal_dpg.pop_container_stack()
@contextmanager
def colormap_registry(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, show: bool =False, **kwargs) -> Union[int, str]:
	 """	 Adds a colormap registry.

	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		show (bool, optional): Attempt to render widget.
		id (Union[int, str], optional): (deprecated)
	Yields:
		Union[int, str]
	 """
	 # Resolve the deprecated 'id' keyword before creating the item.
	 if 'id' in kwargs:
		 warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		 tag = kwargs['id']
	 widget = internal_dpg.add_colormap_registry(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, show=show, **kwargs)
	 internal_dpg.push_container_stack(widget)
	 # Guard only the yield: if creation or the push above raised, the
	 # finally block must not pop the *parent* container off the stack.
	 try:
		 yield widget
	 finally:
		 internal_dpg.pop_container_stack()
@contextmanager
def drag_payload(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, show: bool =True, drag_data: Any =None, drop_data: Any =None, payload_type: str ='$$DPG_PAYLOAD', **kwargs) -> Union[int, str]:
	 """	 User data payload for drag and drop operations.

	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		show (bool, optional): Attempt to render widget.
		drag_data (Any, optional): Drag data
		drop_data (Any, optional): Drop data
		payload_type (str, optional): 
		id (Union[int, str], optional): (deprecated)
	Yields:
		Union[int, str]
	 """
	 # Resolve the deprecated 'id' keyword before creating the item.
	 if 'id' in kwargs:
		 warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		 tag = kwargs['id']
	 widget = internal_dpg.add_drag_payload(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, show=show, drag_data=drag_data, drop_data=drop_data, payload_type=payload_type, **kwargs)
	 internal_dpg.push_container_stack(widget)
	 # Guard only the yield: if creation or the push above raised, the
	 # finally block must not pop the *parent* container off the stack.
	 try:
		 yield widget
	 finally:
		 internal_dpg.pop_container_stack()
@contextmanager
def draw_layer(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, perspective_divide: bool =False, depth_clipping: bool =False, cull_mode: int =0, **kwargs) -> Union[int, str]:
	 """	 New in 1.1. Creates a layer useful for grouping drawlist items.

	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		show (bool, optional): Attempt to render widget.
		perspective_divide (bool, optional): New in 1.1. apply perspective divide
		depth_clipping (bool, optional): New in 1.1. apply depth clipping
		cull_mode (int, optional): New in 1.1. culling mode, mvCullMode_* constants. Only works with triangles currently.
		id (Union[int, str], optional): (deprecated)
	Yields:
		Union[int, str]
	 """
	 # Resolve the deprecated 'id' keyword before creating the item.
	 if 'id' in kwargs:
		 warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		 tag = kwargs['id']
	 widget = internal_dpg.add_draw_layer(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, show=show, perspective_divide=perspective_divide, depth_clipping=depth_clipping, cull_mode=cull_mode, **kwargs)
	 internal_dpg.push_container_stack(widget)
	 # Guard only the yield: if creation or the push above raised, the
	 # finally block must not pop the *parent* container off the stack.
	 try:
		 yield widget
	 finally:
		 internal_dpg.pop_container_stack()
@contextmanager
def draw_node(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, **kwargs) -> Union[int, str]:
	 """	 New in 1.1. Creates a drawing node to associate a transformation matrix. Child node matricies will concatenate.

	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		show (bool, optional): Attempt to render widget.
		id (Union[int, str], optional): (deprecated)
	Yields:
		Union[int, str]
	 """
	 # Resolve the deprecated 'id' keyword before creating the item.
	 if 'id' in kwargs:
		 warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		 tag = kwargs['id']
	 widget = internal_dpg.add_draw_node(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, show=show, **kwargs)
	 internal_dpg.push_container_stack(widget)
	 # Guard only the yield: if creation or the push above raised, the
	 # finally block must not pop the *parent* container off the stack.
	 try:
		 yield widget
	 finally:
		 internal_dpg.pop_container_stack()
@contextmanager
def drawlist(width : int, height : int, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, **kwargs) -> Union[int, str]:
	 """	 Adds a drawing canvas.

	Args:
		width (int): 
		height (int): 
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		callback (Callable, optional): Registers a callback.
		show (bool, optional): Attempt to render widget.
		pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
		filter_key (str, optional): Used by filter widget.
		delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
		tracked (bool, optional): Scroll tracking
		track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
		id (Union[int, str], optional): (deprecated)
	Yields:
		Union[int, str]
	 """
	 # Resolve the deprecated 'id' keyword before creating the item.
	 if 'id' in kwargs:
		 warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		 tag = kwargs['id']
	 widget = internal_dpg.add_drawlist(width, height, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, callback=callback, show=show, pos=pos, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, **kwargs)
	 internal_dpg.push_container_stack(widget)
	 # Guard only the yield: if creation or the push above raised, the
	 # finally block must not pop the *parent* container off the stack.
	 try:
		 yield widget
	 finally:
		 internal_dpg.pop_container_stack()
@contextmanager
def file_dialog(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, callback: Callable =None, show: bool =True, default_path: str ='', default_filename: str ='.', file_count: int =0, modal: bool =False, directory_selector: bool =False, min_size: Union[List[int], Tuple[int, ...]] =[100, 100], max_size: Union[List[int], Tuple[int, ...]] =[30000, 30000], **kwargs) -> Union[int, str]:
	 """	 Displays a file or directory selector depending on keywords. Displays a file dialog by default. Callback will be ran when the file or directory picker is closed. The app_data arguemnt will be populated with information related to the file and directory as a dictionary.

	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		width (int, optional): Width of the item.
		height (int, optional): Height of the item.
		callback (Callable, optional): Registers a callback.
		show (bool, optional): Attempt to render widget.
		default_path (str, optional): Path that the file dialog will default to when opened.
		default_filename (str, optional): Default name that will show in the file name input.
		file_count (int, optional): Number of visible files in the dialog.
		modal (bool, optional): Forces user interaction with the file selector.
		directory_selector (bool, optional): Shows only directory/paths as options. Allows selection of directory/paths only.
		min_size (Union[List[int], Tuple[int, ...]], optional): Minimum window size.
		max_size (Union[List[int], Tuple[int, ...]], optional): Maximum window size.
		id (Union[int, str], optional): (deprecated)
	Yields:
		Union[int, str]
	 """
	 # Resolve the deprecated 'id' keyword before creating the item.
	 if 'id' in kwargs:
		 warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		 tag = kwargs['id']
	 widget = internal_dpg.add_file_dialog(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, callback=callback, show=show, default_path=default_path, default_filename=default_filename, file_count=file_count, modal=modal, directory_selector=directory_selector, min_size=min_size, max_size=max_size, **kwargs)
	 internal_dpg.push_container_stack(widget)
	 # Guard only the yield: if creation or the push above raised, the
	 # finally block must not pop the *parent* container off the stack.
	 try:
		 yield widget
	 finally:
		 internal_dpg.pop_container_stack()
@contextmanager
def filter_set(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, delay_search: bool =False, **kwargs) -> Union[int, str]:
	 """	 Helper to parse and apply text filters (e.g. aaaaa[, bbbbb][, ccccc])

	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		width (int, optional): Width of the item.
		indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		show (bool, optional): Attempt to render widget.
		delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
		id (Union[int, str], optional): (deprecated)
	Yields:
		Union[int, str]
	 """
	 # Resolve the deprecated 'id' keyword before creating the item.
	 if 'id' in kwargs:
		 warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		 tag = kwargs['id']
	 widget = internal_dpg.add_filter_set(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, indent=indent, parent=parent, before=before, show=show, delay_search=delay_search, **kwargs)
	 internal_dpg.push_container_stack(widget)
	 # Guard only the yield: if creation or the push above raised, the
	 # finally block must not pop the *parent* container off the stack.
	 try:
		 yield widget
	 finally:
		 internal_dpg.pop_container_stack()
@contextmanager
def font(file : str, size : int, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =internal_dpg.mvReservedUUID_0, **kwargs) -> Union[int, str]:
	 """	 Adds font to a font registry.

	Args:
		file (str): 
		size (int): 
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		id (Union[int, str], optional): (deprecated)
		default_font (bool, optional): (deprecated)
	Yields:
		Union[int, str]
	 """
	 # Resolve deprecated keywords before creating the item.
	 if 'id' in kwargs:
		 warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		 tag = kwargs['id']
	 if 'default_font' in kwargs:
		 warnings.warn('default_font keyword removed', DeprecationWarning, 2)
		 kwargs.pop('default_font', None)
	 widget = internal_dpg.add_font(file, size, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, **kwargs)
	 internal_dpg.push_container_stack(widget)
	 # Guard only the yield: if creation or the push above raised, the
	 # finally block must not pop the *parent* container off the stack.
	 try:
		 yield widget
	 finally:
		 internal_dpg.pop_container_stack()
@contextmanager
def font_registry(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, show: bool =True, **kwargs) -> Union[int, str]:
	 """	 Adds a font registry.

	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		show (bool, optional): Attempt to render widget.
		id (Union[int, str], optional): (deprecated)
	Yields:
		Union[int, str]
	 """
	 # Resolve the deprecated 'id' keyword before creating the item.
	 if 'id' in kwargs:
		 warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		 tag = kwargs['id']
	 widget = internal_dpg.add_font_registry(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, show=show, **kwargs)
	 internal_dpg.push_container_stack(widget)
	 # Guard only the yield: if creation or the push above raised, the
	 # finally block must not pop the *parent* container off the stack.
	 try:
		 yield widget
	 finally:
		 internal_dpg.pop_container_stack()
@contextmanager
def group(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, horizontal: bool =False, horizontal_spacing: float =-1, xoffset: float =0.0, **kwargs) -> Union[int, str]:
	 """	 Creates a group that other widgets can belong to. The group allows item commands to be issued for all of its members.

	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		width (int, optional): Width of the item.
		indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
		drag_callback (Callable, optional): Registers a drag callback for drag and drop.
		drop_callback (Callable, optional): Registers a drop callback for drag and drop.
		show (bool, optional): Attempt to render widget.
		pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
		filter_key (str, optional): Used by filter widget.
		delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
		tracked (bool, optional): Scroll tracking
		track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
		horizontal (bool, optional): Forces child widgets to be added in a horizontal layout.
		horizontal_spacing (float, optional): Spacing for the horizontal layout.
		xoffset (float, optional): Offset from containing window x item location within group.
		id (Union[int, str], optional): (deprecated)
	Yields:
		Union[int, str]
	 """
	 # Resolve the deprecated 'id' keyword before creating the item.
	 if 'id' in kwargs:
		 warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		 tag = kwargs['id']
	 widget = internal_dpg.add_group(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, indent=indent, parent=parent, before=before, payload_type=payload_type, drag_callback=drag_callback, drop_callback=drop_callback, show=show, pos=pos, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, horizontal=horizontal, horizontal_spacing=horizontal_spacing, xoffset=xoffset, **kwargs)
	 internal_dpg.push_container_stack(widget)
	 # Guard only the yield: if creation or the push above raised, the
	 # finally block must not pop the *parent* container off the stack.
	 try:
		 yield widget
	 finally:
		 internal_dpg.pop_container_stack()
@contextmanager
def handler_registry(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, show: bool =True, **kwargs) -> Union[int, str]:
	 """	 Adds a handler registry.

	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		show (bool, optional): Attempt to render widget.
		id (Union[int, str], optional): (deprecated)
	Yields:
		Union[int, str]
	 """
	 # Resolve the deprecated 'id' keyword before creating the item.
	 if 'id' in kwargs:
		 warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		 tag = kwargs['id']
	 widget = internal_dpg.add_handler_registry(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, show=show, **kwargs)
	 internal_dpg.push_container_stack(widget)
	 # Guard only the yield: if creation or the push above raised, the
	 # finally block must not pop the *parent* container off the stack.
	 try:
		 yield widget
	 finally:
		 internal_dpg.pop_container_stack()
@contextmanager
def item_handler_registry(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, show: bool =True, **kwargs) -> Union[int, str]:
	 """	 Adds an item handler registry.

	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		show (bool, optional): Attempt to render widget.
		id (Union[int, str], optional): (deprecated)
	Yields:
		Union[int, str]
	 """
	 # Resolve the deprecated 'id' keyword before creating the item.
	 if 'id' in kwargs:
		 warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		 tag = kwargs['id']
	 widget = internal_dpg.add_item_handler_registry(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, show=show, **kwargs)
	 internal_dpg.push_container_stack(widget)
	 # Guard only the yield: if creation or the push above raised, the
	 # finally block must not pop the *parent* container off the stack.
	 try:
		 yield widget
	 finally:
		 internal_dpg.pop_container_stack()
@contextmanager
def menu(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drop_callback: Callable =None, show: bool =True, enabled: bool =True, filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, **kwargs) -> Union[int, str]:
	 """	 Adds a menu to an existing menu bar.

	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
		drop_callback (Callable, optional): Registers a drop callback for drag and drop.
		show (bool, optional): Attempt to render widget.
		enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
		filter_key (str, optional): Used by filter widget.
		delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
		tracked (bool, optional): Scroll tracking
		track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
		id (Union[int, str], optional): (deprecated)
	Yields:
		Union[int, str]
	 """
	 # Resolve the deprecated 'id' keyword before creating the item.
	 if 'id' in kwargs:
		 warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		 tag = kwargs['id']
	 widget = internal_dpg.add_menu(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, indent=indent, parent=parent, before=before, payload_type=payload_type, drop_callback=drop_callback, show=show, enabled=enabled, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, **kwargs)
	 internal_dpg.push_container_stack(widget)
	 # Guard only the yield: if creation or the push above raised, the
	 # finally block must not pop the *parent* container off the stack.
	 try:
		 yield widget
	 finally:
		 internal_dpg.pop_container_stack()
@contextmanager
def menu_bar(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, show: bool =True, delay_search: bool =False, **kwargs) -> Union[int, str]:
	 """	 Adds a menu bar to a window.

	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		show (bool, optional): Attempt to render widget.
		delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
		id (Union[int, str], optional): (deprecated)
	Yields:
		Union[int, str]
	 """
	 # Resolve the deprecated 'id' keyword before creating the item.
	 if 'id' in kwargs:
		 warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		 tag = kwargs['id']
	 widget = internal_dpg.add_menu_bar(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, indent=indent, parent=parent, show=show, delay_search=delay_search, **kwargs)
	 internal_dpg.push_container_stack(widget)
	 # Guard only the yield: if creation or the push above raised, the
	 # finally block must not pop the *parent* container off the stack.
	 try:
		 yield widget
	 finally:
		 internal_dpg.pop_container_stack()
@contextmanager
def node(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, draggable: bool =True, **kwargs) -> Union[int, str]:
	 """	 Adds a node to a node editor.

	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
		drag_callback (Callable, optional): Registers a drag callback for drag and drop.
		drop_callback (Callable, optional): Registers a drop callback for drag and drop.
		show (bool, optional): Attempt to render widget.
		pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
		filter_key (str, optional): Used by filter widget.
		delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
		tracked (bool, optional): Scroll tracking
		track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
		draggable (bool, optional): Allow node to be draggable.
		id (Union[int, str], optional): (deprecated)
	Yields:
		Union[int, str]
	 """
	 # Resolve the deprecated 'id' keyword before creating the item.
	 if 'id' in kwargs:
		 warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		 tag = kwargs['id']
	 widget = internal_dpg.add_node(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, payload_type=payload_type, drag_callback=drag_callback, drop_callback=drop_callback, show=show, pos=pos, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, draggable=draggable, **kwargs)
	 internal_dpg.push_container_stack(widget)
	 # Guard only the yield: if creation or the push above raised, the
	 # finally block must not pop the *parent* container off the stack.
	 try:
		 yield widget
	 finally:
		 internal_dpg.pop_container_stack()
@contextmanager
def node_attribute(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, filter_key: str ='', tracked: bool =False, track_offset: float =0.5, attribute_type: int =0, shape: int =1, category: str ='general', **kwargs) -> Union[int, str]:
	 """	 Adds a node attribute to a node.

	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		show (bool, optional): Attempt to render widget.
		filter_key (str, optional): Used by filter widget.
		tracked (bool, optional): Scroll tracking
		track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
		attribute_type (int, optional): mvNode_Attr_Input, mvNode_Attr_Output, or mvNode_Attr_Static.
		shape (int, optional): Pin shape.
		category (str, optional): Category
		id (Union[int, str], optional): (deprecated)
	Yields:
		Union[int, str]
	 """
	 # Resolve the deprecated 'id' keyword before creating the item.
	 if 'id' in kwargs:
		 warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		 tag = kwargs['id']
	 widget = internal_dpg.add_node_attribute(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, indent=indent, parent=parent, before=before, show=show, filter_key=filter_key, tracked=tracked, track_offset=track_offset, attribute_type=attribute_type, shape=shape, category=category, **kwargs)
	 internal_dpg.push_container_stack(widget)
	 # Guard only the yield: if creation or the push above raised, the
	 # finally block must not pop the *parent* container off the stack.
	 try:
		 yield widget
	 finally:
		 internal_dpg.pop_container_stack()
@contextmanager
def node_editor(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, parent: Union[int, str] =0, before: Union[int, str] =0, callback: Callable =None, show: bool =True, filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, delink_callback: Callable =None, menubar: bool =False, **kwargs) -> Union[int, str]:
    """ Adds a node editor.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        filter_key (str, optional): Used by filter widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        delink_callback (Callable, optional): Callback ran when a link is detached.
        menubar (bool, optional): Shows or hides the menubar.
        id (Union[int, str], optional): (deprecated)
    Yields:
        Union[int, str]
    """
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag=kwargs['id']
    # Create and push BEFORE entering the try block: if creation fails, the
    # container stack must be left untouched (popping here would corrupt the
    # caller's container nesting).
    widget = internal_dpg.add_node_editor(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, parent=parent, before=before, callback=callback, show=show, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, delink_callback=delink_callback, menubar=menubar, **kwargs)
    internal_dpg.push_container_stack(widget)
    try:
        yield widget
    finally:
        # Pop only what was successfully pushed above.
        internal_dpg.pop_container_stack()
@contextmanager
def plot(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, no_title: bool =False, no_menus: bool =False, no_box_select: bool =False, no_mouse_pos: bool =False, no_highlight: bool =False, no_child: bool =False, query: bool =False, crosshairs: bool =False, anti_aliased: bool =False, equal_aspects: bool =False, pan_button: int =internal_dpg.mvMouseButton_Left, pan_mod: int =-1, fit_button: int =internal_dpg.mvMouseButton_Left, context_menu_button: int =internal_dpg.mvMouseButton_Right, box_select_button: int =internal_dpg.mvMouseButton_Right, box_select_mod: int =-1, box_select_cancel_button: int =internal_dpg.mvMouseButton_Left, query_button: int =internal_dpg.mvMouseButton_Middle, query_mod: int =-1, query_toggle_mod: int =internal_dpg.mvKey_Control, horizontal_mod: int =internal_dpg.mvKey_Alt, vertical_mod: int =internal_dpg.mvKey_Shift, **kwargs) -> Union[int, str]:
    """ Adds a plot which is used to hold series, and can be drawn to with draw commands.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        no_title (bool, optional):
        no_menus (bool, optional):
        no_box_select (bool, optional):
        no_mouse_pos (bool, optional):
        no_highlight (bool, optional):
        no_child (bool, optional):
        query (bool, optional):
        crosshairs (bool, optional):
        anti_aliased (bool, optional):
        equal_aspects (bool, optional):
        pan_button (int, optional): enables panning when held
        pan_mod (int, optional): optional modifier that must be held for panning
        fit_button (int, optional): fits visible data when double clicked
        context_menu_button (int, optional): opens plot context menu (if enabled) when clicked
        box_select_button (int, optional): begins box selection when pressed and confirms selection when released
        box_select_mod (int, optional): begins box selection when pressed and confirms selection when released
        box_select_cancel_button (int, optional): cancels active box selection when pressed
        query_button (int, optional): begins query selection when pressed and end query selection when released
        query_mod (int, optional): optional modifier that must be held for query selection
        query_toggle_mod (int, optional): when held, active box selections turn into queries
        horizontal_mod (int, optional): expands active box selection/query horizontally to plot edge when held
        vertical_mod (int, optional): expands active box selection/query vertically to plot edge when held
        id (Union[int, str], optional): (deprecated)
    Yields:
        Union[int, str]
    """
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag=kwargs['id']
    # Create and push BEFORE entering the try block: if creation fails, the
    # container stack must be left untouched (popping here would corrupt the
    # caller's container nesting).
    widget = internal_dpg.add_plot(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, pos=pos, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, no_title=no_title, no_menus=no_menus, no_box_select=no_box_select, no_mouse_pos=no_mouse_pos, no_highlight=no_highlight, no_child=no_child, query=query, crosshairs=crosshairs, anti_aliased=anti_aliased, equal_aspects=equal_aspects, pan_button=pan_button, pan_mod=pan_mod, fit_button=fit_button, context_menu_button=context_menu_button, box_select_button=box_select_button, box_select_mod=box_select_mod, box_select_cancel_button=box_select_cancel_button, query_button=query_button, query_mod=query_mod, query_toggle_mod=query_toggle_mod, horizontal_mod=horizontal_mod, vertical_mod=vertical_mod, **kwargs)
    internal_dpg.push_container_stack(widget)
    try:
        yield widget
    finally:
        # Pop only what was successfully pushed above.
        internal_dpg.pop_container_stack()
@contextmanager
def plot_axis(axis : int, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drop_callback: Callable =None, show: bool =True, no_gridlines: bool =False, no_tick_marks: bool =False, no_tick_labels: bool =False, log_scale: bool =False, invert: bool =False, lock_min: bool =False, lock_max: bool =False, time: bool =False, **kwargs) -> Union[int, str]:
    """ Adds an axis to a plot.
    Args:
        axis (int):
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        no_gridlines (bool, optional):
        no_tick_marks (bool, optional):
        no_tick_labels (bool, optional):
        log_scale (bool, optional):
        invert (bool, optional):
        lock_min (bool, optional):
        lock_max (bool, optional):
        time (bool, optional):
        id (Union[int, str], optional): (deprecated)
    Yields:
        Union[int, str]
    """
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag=kwargs['id']
    # Create and push BEFORE entering the try block: if creation fails, the
    # container stack must be left untouched (popping here would corrupt the
    # caller's container nesting).
    widget = internal_dpg.add_plot_axis(axis, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, payload_type=payload_type, drop_callback=drop_callback, show=show, no_gridlines=no_gridlines, no_tick_marks=no_tick_marks, no_tick_labels=no_tick_labels, log_scale=log_scale, invert=invert, lock_min=lock_min, lock_max=lock_max, time=time, **kwargs)
    internal_dpg.push_container_stack(widget)
    try:
        yield widget
    finally:
        # Pop only what was successfully pushed above.
        internal_dpg.pop_container_stack()
@contextmanager
def stage(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, **kwargs) -> Union[int, str]:
    """ Adds a stage.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        id (Union[int, str], optional): (deprecated)
    Yields:
        Union[int, str]
    """
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag=kwargs['id']
    # Create and push BEFORE entering the try block: if creation fails, the
    # container stack must be left untouched (popping here would corrupt the
    # caller's container nesting).
    widget = internal_dpg.add_stage(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, **kwargs)
    internal_dpg.push_container_stack(widget)
    try:
        yield widget
    finally:
        # Pop only what was successfully pushed above.
        internal_dpg.pop_container_stack()
@contextmanager
def subplots(rows : int, columns : int, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, row_ratios: Union[List[float], Tuple[float, ...]] =[], column_ratios: Union[List[float], Tuple[float, ...]] =[], no_title: bool =False, no_menus: bool =False, no_resize: bool =False, no_align: bool =False, link_rows: bool =False, link_columns: bool =False, link_all_x: bool =False, link_all_y: bool =False, column_major: bool =False, **kwargs) -> Union[int, str]:
    """ Adds a collection of plots.
    Args:
        rows (int):
        columns (int):
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        row_ratios (Union[List[float], Tuple[float, ...]], optional):
        column_ratios (Union[List[float], Tuple[float, ...]], optional):
        no_title (bool, optional):
        no_menus (bool, optional): the user will not be able to open context menus with right-click
        no_resize (bool, optional): resize splitters between subplot cells will be not be provided
        no_align (bool, optional): subplot edges will not be aligned vertically or horizontally
        link_rows (bool, optional): link the y-axis limits of all plots in each row (does not apply auxiliary y-axes)
        link_columns (bool, optional): link the x-axis limits of all plots in each column
        link_all_x (bool, optional): link the x-axis limits in every plot in the subplot
        link_all_y (bool, optional): link the y-axis limits in every plot in the subplot (does not apply to auxiliary y-axes)
        column_major (bool, optional): subplots are added in column major order instead of the default row major order
        id (Union[int, str], optional): (deprecated)
    Yields:
        Union[int, str]
    """
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag=kwargs['id']
    # Create and push BEFORE entering the try block: if creation fails, the
    # container stack must be left untouched (popping here would corrupt the
    # caller's container nesting).
    widget = internal_dpg.add_subplots(rows, columns, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, callback=callback, show=show, pos=pos, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, row_ratios=row_ratios, column_ratios=column_ratios, no_title=no_title, no_menus=no_menus, no_resize=no_resize, no_align=no_align, link_rows=link_rows, link_columns=link_columns, link_all_x=link_all_x, link_all_y=link_all_y, column_major=column_major, **kwargs)
    internal_dpg.push_container_stack(widget)
    try:
        yield widget
    finally:
        # Pop only what was successfully pushed above.
        internal_dpg.pop_container_stack()
@contextmanager
def tab(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drop_callback: Callable =None, show: bool =True, filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, closable: bool =False, no_tooltip: bool =False, order_mode: int =0, **kwargs) -> Union[int, str]:
    """ Adds a tab to a tab bar.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        filter_key (str, optional): Used by filter widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        closable (bool, optional): Creates a button on the tab that can hide the tab.
        no_tooltip (bool, optional): Disable tooltip for the given tab.
        order_mode (int, optional): set using a constant: mvTabOrder_Reorderable: allows reordering, mvTabOrder_Fixed: fixed ordering, mvTabOrder_Leading: adds tab to front, mvTabOrder_Trailing: adds tab to back
        id (Union[int, str], optional): (deprecated)
    Yields:
        Union[int, str]
    """
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag=kwargs['id']
    # Create and push BEFORE entering the try block: if creation fails, the
    # container stack must be left untouched (popping here would corrupt the
    # caller's container nesting).
    widget = internal_dpg.add_tab(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, indent=indent, parent=parent, before=before, payload_type=payload_type, drop_callback=drop_callback, show=show, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, closable=closable, no_tooltip=no_tooltip, order_mode=order_mode, **kwargs)
    internal_dpg.push_container_stack(widget)
    try:
        yield widget
    finally:
        # Pop only what was successfully pushed above.
        internal_dpg.pop_container_stack()
@contextmanager
def tab_bar(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, reorderable: bool =False, **kwargs) -> Union[int, str]:
    """ Adds a tab bar.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        reorderable (bool, optional): Allows for the user to change the order of the tabs.
        id (Union[int, str], optional): (deprecated)
    Yields:
        Union[int, str]
    """
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag=kwargs['id']
    # Create and push BEFORE entering the try block: if creation fails, the
    # container stack must be left untouched (popping here would corrupt the
    # caller's container nesting).
    widget = internal_dpg.add_tab_bar(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, indent=indent, parent=parent, before=before, callback=callback, show=show, pos=pos, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, reorderable=reorderable, **kwargs)
    internal_dpg.push_container_stack(widget)
    try:
        yield widget
    finally:
        # Pop only what was successfully pushed above.
        internal_dpg.pop_container_stack()
@contextmanager
def table(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', delay_search: bool =False, header_row: bool =True, clipper: bool =False, inner_width: int =0, policy: int =0, freeze_rows: int =0, freeze_columns: int =0, sort_multi: bool =False, sort_tristate: bool =False, resizable: bool =False, reorderable: bool =False, hideable: bool =False, sortable: bool =False, context_menu_in_body: bool =False, row_background: bool =False, borders_innerH: bool =False, borders_outerH: bool =False, borders_innerV: bool =False, borders_outerV: bool =False, no_host_extendX: bool =False, no_host_extendY: bool =False, no_keep_columns_visible: bool =False, precise_widths: bool =False, no_clip: bool =False, pad_outerX: bool =False, no_pad_outerX: bool =False, no_pad_innerX: bool =False, scrollX: bool =False, scrollY: bool =False, no_saved_settings: bool =False, **kwargs) -> Union[int, str]:
    """ Adds a table.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        header_row (bool, optional): show headers at the top of the columns
        clipper (bool, optional): Use clipper (rows must be same height).
        inner_width (int, optional):
        policy (int, optional):
        freeze_rows (int, optional):
        freeze_columns (int, optional):
        sort_multi (bool, optional): Hold shift when clicking headers to sort on multiple column.
        sort_tristate (bool, optional): Allow no sorting, disable default sorting.
        resizable (bool, optional): Enable resizing columns
        reorderable (bool, optional): Enable reordering columns in header row (need calling TableSetupColumn() + TableHeadersRow() to display headers)
        hideable (bool, optional): Enable hiding/disabling columns in context menu.
        sortable (bool, optional): Enable sorting. Call TableGetSortSpecs() to obtain sort specs. Also see ImGuiTableFlags_SortMulti and ImGuiTableFlags_SortTristate.
        context_menu_in_body (bool, optional): Right-click on columns body/contents will display table context menu. By default it is available in TableHeadersRow().
        row_background (bool, optional): Set each RowBg color with ImGuiCol_TableRowBg or ImGuiCol_TableRowBgAlt (equivalent of calling TableSetBgColor with ImGuiTableBgFlags_RowBg0 on each row manually)
        borders_innerH (bool, optional): Draw horizontal borders between rows.
        borders_outerH (bool, optional): Draw horizontal borders at the top and bottom.
        borders_innerV (bool, optional): Draw vertical borders between columns.
        borders_outerV (bool, optional): Draw vertical borders on the left and right sides.
        no_host_extendX (bool, optional): Make outer width auto-fit to columns, overriding outer_size.x value. Only available when ScrollX/ScrollY are disabled and Stretch columns are not used.
        no_host_extendY (bool, optional): Make outer height stop exactly at outer_size.y (prevent auto-extending table past the limit). Only available when ScrollX/ScrollY are disabled. Data below the limit will be clipped and not visible.
        no_keep_columns_visible (bool, optional): Disable keeping column always minimally visible when ScrollX is off and table gets too small. Not recommended if columns are resizable.
        precise_widths (bool, optional): Disable distributing remainder width to stretched columns (width allocation on a 100-wide table with 3 columns: Without this flag: 33,33,34. With this flag: 33,33,33). With larger number of columns, resizing will appear to be less smooth.
        no_clip (bool, optional): Disable clipping rectangle for every individual columns.
        pad_outerX (bool, optional): Default if BordersOuterV is on. Enable outer-most padding. Generally desirable if you have headers.
        no_pad_outerX (bool, optional): Default if BordersOuterV is off. Disable outer-most padding.
        no_pad_innerX (bool, optional): Disable inner padding between columns (double inner padding if BordersOuterV is on, single inner padding if BordersOuterV is off).
        scrollX (bool, optional): Enable horizontal scrolling. Require 'outer_size' parameter of BeginTable() to specify the container size. Changes default sizing policy. Because this create a child window, ScrollY is currently generally recommended when using ScrollX.
        scrollY (bool, optional): Enable vertical scrolling.
        no_saved_settings (bool, optional): Never load/save settings in .ini file.
        id (Union[int, str], optional): (deprecated)
    Yields:
        Union[int, str]
    """
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag=kwargs['id']
    # Create and push BEFORE entering the try block: if creation fails, the
    # container stack must be left untouched (popping here would corrupt the
    # caller's container nesting).
    widget = internal_dpg.add_table(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, source=source, callback=callback, show=show, pos=pos, filter_key=filter_key, delay_search=delay_search, header_row=header_row, clipper=clipper, inner_width=inner_width, policy=policy, freeze_rows=freeze_rows, freeze_columns=freeze_columns, sort_multi=sort_multi, sort_tristate=sort_tristate, resizable=resizable, reorderable=reorderable, hideable=hideable, sortable=sortable, context_menu_in_body=context_menu_in_body, row_background=row_background, borders_innerH=borders_innerH, borders_outerH=borders_outerH, borders_innerV=borders_innerV, borders_outerV=borders_outerV, no_host_extendX=no_host_extendX, no_host_extendY=no_host_extendY, no_keep_columns_visible=no_keep_columns_visible, precise_widths=precise_widths, no_clip=no_clip, pad_outerX=pad_outerX, no_pad_outerX=no_pad_outerX, no_pad_innerX=no_pad_innerX, scrollX=scrollX, scrollY=scrollY, no_saved_settings=no_saved_settings, **kwargs)
    internal_dpg.push_container_stack(widget)
    try:
        yield widget
    finally:
        # Pop only what was successfully pushed above.
        internal_dpg.pop_container_stack()
@contextmanager
def table_cell(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, height: int =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, filter_key: str ='', **kwargs) -> Union[int, str]:
    """ Adds a table cell.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        height (int, optional): Height of the item.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        filter_key (str, optional): Used by filter widget.
        id (Union[int, str], optional): (deprecated)
    Yields:
        Union[int, str]
    """
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag=kwargs['id']
    # Create and push BEFORE entering the try block: if creation fails, the
    # container stack must be left untouched (popping here would corrupt the
    # caller's container nesting).
    widget = internal_dpg.add_table_cell(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, height=height, parent=parent, before=before, show=show, filter_key=filter_key, **kwargs)
    internal_dpg.push_container_stack(widget)
    try:
        yield widget
    finally:
        # Pop only what was successfully pushed above.
        internal_dpg.pop_container_stack()
@contextmanager
def table_row(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, height: int =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, filter_key: str ='', **kwargs) -> Union[int, str]:
    """ Adds a table row.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        height (int, optional): Height of the item.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        filter_key (str, optional): Used by filter widget.
        id (Union[int, str], optional): (deprecated)
    Yields:
        Union[int, str]
    """
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag=kwargs['id']
    # Create and push BEFORE entering the try block: if creation fails, the
    # container stack must be left untouched (popping here would corrupt the
    # caller's container nesting).
    widget = internal_dpg.add_table_row(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, height=height, parent=parent, before=before, show=show, filter_key=filter_key, **kwargs)
    internal_dpg.push_container_stack(widget)
    try:
        yield widget
    finally:
        # Pop only what was successfully pushed above.
        internal_dpg.pop_container_stack()
@contextmanager
def template_registry(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, **kwargs) -> Union[int, str]:
    """ Adds a template registry.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        id (Union[int, str], optional): (deprecated)
    Yields:
        Union[int, str]
    """
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag=kwargs['id']
    # Create and push BEFORE entering the try block: if creation fails, the
    # container stack must be left untouched (popping here would corrupt the
    # caller's container nesting).
    widget = internal_dpg.add_template_registry(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, **kwargs)
    internal_dpg.push_container_stack(widget)
    try:
        yield widget
    finally:
        # Pop only what was successfully pushed above.
        internal_dpg.pop_container_stack()
@contextmanager
def texture_registry(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, show: bool =False, **kwargs) -> Union[int, str]:
    """ Adds a texture registry (container for static/dynamic textures).
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Yields:
        Union[int, str]
    """
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag=kwargs['id']
    # Create and push BEFORE entering the try block: if creation fails, the
    # container stack must be left untouched (popping here would corrupt the
    # caller's container nesting).
    widget = internal_dpg.add_texture_registry(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, show=show, **kwargs)
    internal_dpg.push_container_stack(widget)
    try:
        yield widget
    finally:
        # Pop only what was successfully pushed above.
        internal_dpg.pop_container_stack()
@contextmanager
def theme(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, **kwargs) -> Union[int, str]:
    """ Context manager that creates a theme and pushes it onto the container stack.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data made available to callbacks.
        use_internal_label (bool, optional): Use the generated internal label (appends ### uuid) instead of the user-specified one.
        tag (Union[int, str], optional): Unique id used to refer to the item programmatically; doubles as the label when label is unset.
        id (Union[int, str], optional): (deprecated) use 'tag' instead.
        default_theme (bool, optional): (deprecated) no longer supported; silently discarded.
    Yields:
        Union[int, str]
    """
    try:
        # Backwards compatibility: the old 'id' keyword maps onto 'tag'.
        if 'id' in kwargs:
            warnings.warn('id keyword renamed to tag', DeprecationWarning, stacklevel=2)
            tag = kwargs['id']
        # 'default_theme' was removed from the API; strip it so it is not forwarded.
        if 'default_theme' in kwargs:
            warnings.warn('default_theme keyword removed', DeprecationWarning, stacklevel=2)
            del kwargs['default_theme']
        theme_id = internal_dpg.add_theme(tag=tag, label=label, user_data=user_data, use_internal_label=use_internal_label, **kwargs)
        internal_dpg.push_container_stack(theme_id)
        yield theme_id
    finally:
        internal_dpg.pop_container_stack()
@contextmanager
def theme_component(item_type : int =0, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, enabled_state: bool =True, **kwargs) -> Union[int, str]:
    """ Context manager that creates a theme component and pushes it onto the container stack.
    Args:
        item_type (int, optional): item type the component applies to.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data made available to callbacks.
        use_internal_label (bool, optional): Use the generated internal label (appends ### uuid) instead of the user-specified one.
        tag (Union[int, str], optional): Unique id used to refer to the item programmatically; doubles as the label when label is unset.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        enabled_state (bool, optional): enabled state the component targets.
        id (Union[int, str], optional): (deprecated) use 'tag' instead.
    Yields:
        Union[int, str]
    """
    try:
        # Backwards compatibility: the old 'id' keyword maps onto 'tag'.
        if 'id' in kwargs:
            warnings.warn('id keyword renamed to tag', DeprecationWarning, stacklevel=2)
            tag = kwargs['id']
        component = internal_dpg.add_theme_component(item_type, tag=tag, parent=parent, before=before, enabled_state=enabled_state, label=label, user_data=user_data, use_internal_label=use_internal_label, **kwargs)
        internal_dpg.push_container_stack(component)
        yield component
    finally:
        internal_dpg.pop_container_stack()
@contextmanager
def tooltip(parent : Union[int, str], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, show: bool =True, **kwargs) -> Union[int, str]:
    """ Context manager that creates a tooltip window for *parent* and pushes it onto the container stack.
    Args:
        parent (Union[int, str]): item the tooltip is attached to.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data made available to callbacks.
        use_internal_label (bool, optional): Use the generated internal label (appends ### uuid) instead of the user-specified one.
        tag (Union[int, str], optional): Unique id used to refer to the item programmatically; doubles as the label when label is unset.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated) use 'tag' instead.
    Yields:
        Union[int, str]
    """
    try:
        # Backwards compatibility: the old 'id' keyword maps onto 'tag'.
        if 'id' in kwargs:
            warnings.warn('id keyword renamed to tag', DeprecationWarning, stacklevel=2)
            tag = kwargs['id']
        tip = internal_dpg.add_tooltip(parent, tag=tag, show=show, label=label, user_data=user_data, use_internal_label=use_internal_label, **kwargs)
        internal_dpg.push_container_stack(tip)
        yield tip
    finally:
        internal_dpg.pop_container_stack()
@contextmanager
def tree_node(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, default_open: bool =False, open_on_double_click: bool =False, open_on_arrow: bool =False, leaf: bool =False, bullet: bool =False, selectable: bool =False, **kwargs) -> Union[int, str]:
    """ Adds a tree node to add items to.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        default_open (bool, optional): Sets the tree node open by default.
        open_on_double_click (bool, optional): Need double-click to open node.
        open_on_arrow (bool, optional): Only open when clicking on the arrow part.
        leaf (bool, optional): No collapsing, no arrow (use as a convenience for leaf nodes).
        bullet (bool, optional): Display a bullet instead of arrow.
        selectable (bool, optional): Makes the tree selectable.
        id (Union[int, str], optional): (deprecated)
    Yields:
        Union[int, str]
    """
    try:
        # Accept the deprecated 'id' keyword as an alias for 'tag'.
        if 'id' in kwargs.keys():
            warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
            tag=kwargs['id']
        widget = internal_dpg.add_tree_node(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, indent=indent, parent=parent, before=before, payload_type=payload_type, drag_callback=drag_callback, drop_callback=drop_callback, show=show, pos=pos, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, default_open=default_open, open_on_double_click=open_on_double_click, open_on_arrow=open_on_arrow, leaf=leaf, bullet=bullet, selectable=selectable, **kwargs)
        internal_dpg.push_container_stack(widget)
        yield widget
    finally:
        internal_dpg.pop_container_stack()
@contextmanager
def value_registry(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, **kwargs) -> Union[int, str]:
    """ Context manager that creates a value registry and pushes it onto the container stack.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data made available to callbacks.
        use_internal_label (bool, optional): Use the generated internal label (appends ### uuid) instead of the user-specified one.
        tag (Union[int, str], optional): Unique id used to refer to the item programmatically; doubles as the label when label is unset.
        id (Union[int, str], optional): (deprecated) use 'tag' instead.
    Yields:
        Union[int, str]
    """
    try:
        # Backwards compatibility: the old 'id' keyword maps onto 'tag'.
        if 'id' in kwargs:
            warnings.warn('id keyword renamed to tag', DeprecationWarning, stacklevel=2)
            tag = kwargs['id']
        registry = internal_dpg.add_value_registry(tag=tag, label=label, user_data=user_data, use_internal_label=use_internal_label, **kwargs)
        internal_dpg.push_container_stack(registry)
        yield registry
    finally:
        internal_dpg.pop_container_stack()
@contextmanager
def viewport_drawlist(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, show: bool =True, filter_key: str ='', delay_search: bool =False, front: bool =True, **kwargs) -> Union[int, str]:
    """ A container that is used to present draw items or layers directly to the viewport. By default this will draw to the back of the viewport. Layers and draw items should be added to this widget as children.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        show (bool, optional): Attempt to render widget.
        filter_key (str, optional): Used by filter widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        front (bool, optional): Draws to the front of the view port instead of the back.
        id (Union[int, str], optional): (deprecated)
    Yields:
        Union[int, str]
    """
    try:
        # Accept the deprecated 'id' keyword as an alias for 'tag'.
        if 'id' in kwargs.keys():
            warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
            tag=kwargs['id']
        widget = internal_dpg.add_viewport_drawlist(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, show=show, filter_key=filter_key, delay_search=delay_search, front=front, **kwargs)
        internal_dpg.push_container_stack(widget)
        yield widget
    finally:
        internal_dpg.pop_container_stack()
@contextmanager
def viewport_menu_bar(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, show: bool =True, delay_search: bool =False, **kwargs) -> Union[int, str]:
    """ Context manager that creates a viewport menubar and pushes it onto the container stack.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data made available to callbacks.
        use_internal_label (bool, optional): Use the generated internal label (appends ### uuid) instead of the user-specified one.
        tag (Union[int, str], optional): Unique id used to refer to the item programmatically; doubles as the label when label is unset.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        show (bool, optional): Attempt to render widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        id (Union[int, str], optional): (deprecated) use 'tag' instead.
    Yields:
        Union[int, str]
    """
    try:
        # Backwards compatibility: the old 'id' keyword maps onto 'tag'.
        if 'id' in kwargs:
            warnings.warn('id keyword renamed to tag', DeprecationWarning, stacklevel=2)
            tag = kwargs['id']
        menubar = internal_dpg.add_viewport_menu_bar(tag=tag, indent=indent, parent=parent, show=show, delay_search=delay_search, label=label, user_data=user_data, use_internal_label=use_internal_label, **kwargs)
        internal_dpg.push_container_stack(menubar)
        yield menubar
    finally:
        internal_dpg.pop_container_stack()
@contextmanager
def window(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], delay_search: bool =False, min_size: Union[List[int], Tuple[int, ...]] =[100, 100], max_size: Union[List[int], Tuple[int, ...]] =[30000, 30000], menubar: bool =False, collapsed: bool =False, autosize: bool =False, no_resize: bool =False, no_title_bar: bool =False, no_move: bool =False, no_scrollbar: bool =False, no_collapse: bool =False, horizontal_scrollbar: bool =False, no_focus_on_appearing: bool =False, no_bring_to_front_on_focus: bool =False, no_close: bool =False, no_background: bool =False, modal: bool =False, popup: bool =False, no_saved_settings: bool =False, no_open_over_existing_popup: bool =True, on_close: Callable =None, **kwargs) -> Union[int, str]:
    """ Creates a new window for following items to be added to.

    NOTE(review): pos/min_size/max_size use mutable list defaults; harmless here
    since this wrapper only forwards them and never mutates them.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        min_size (Union[List[int], Tuple[int, ...]], optional): Minimum window size.
        max_size (Union[List[int], Tuple[int, ...]], optional): Maximum window size.
        menubar (bool, optional): Shows or hides the menubar.
        collapsed (bool, optional): Collapse the window.
        autosize (bool, optional): Autosized the window to fit it's items.
        no_resize (bool, optional): Allows for the window size to be changed or fixed.
        no_title_bar (bool, optional): Title name for the title bar of the window.
        no_move (bool, optional): Allows for the window's position to be changed or fixed.
        no_scrollbar (bool, optional): Disable scrollbars. (window can still scroll with mouse or programmatically)
        no_collapse (bool, optional): Disable user collapsing window by double-clicking on it.
        horizontal_scrollbar (bool, optional): Allow horizontal scrollbar to appear. (off by default)
        no_focus_on_appearing (bool, optional): Disable taking focus when transitioning from hidden to visible state.
        no_bring_to_front_on_focus (bool, optional): Disable bringing window to front when taking focus. (e.g. clicking on it or programmatically giving it focus)
        no_close (bool, optional): Disable user closing the window by removing the close button.
        no_background (bool, optional): Sets Background and border alpha to transparent.
        modal (bool, optional): Fills area behind window according to the theme and disables user ability to interact with anything except the window.
        popup (bool, optional): Fills area behind window according to the theme, removes title bar, collapse and close. Window can be closed by selecting area in the background behind the window.
        no_saved_settings (bool, optional): Never load/save settings in .ini file.
        no_open_over_existing_popup (bool, optional): Don't open if there's already a popup
        on_close (Callable, optional): Callback ran when window is closed.
        id (Union[int, str], optional): (deprecated)
    Yields:
        Union[int, str]
    """
    try:
        # Accept the deprecated 'id' keyword as an alias for 'tag'.
        if 'id' in kwargs.keys():
            warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
            tag=kwargs['id']
        widget = internal_dpg.add_window(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, show=show, pos=pos, delay_search=delay_search, min_size=min_size, max_size=max_size, menubar=menubar, collapsed=collapsed, autosize=autosize, no_resize=no_resize, no_title_bar=no_title_bar, no_move=no_move, no_scrollbar=no_scrollbar, no_collapse=no_collapse, horizontal_scrollbar=horizontal_scrollbar, no_focus_on_appearing=no_focus_on_appearing, no_bring_to_front_on_focus=no_bring_to_front_on_focus, no_close=no_close, no_background=no_background, modal=modal, popup=popup, no_saved_settings=no_saved_settings, no_open_over_existing_popup=no_open_over_existing_popup, on_close=on_close, **kwargs)
        internal_dpg.push_container_stack(widget)
        yield widget
    finally:
        internal_dpg.pop_container_stack()
##########################################################
# Core Wrappings
##########################################################
def add_2d_histogram_series(x : Union[List[float], Tuple[float, ...]], y : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, show: bool =True, xbins: int =-1, ybins: int =-1, xmin_range: float =0.0, xmax_range: float =1.0, ymin_range: float =0.0, ymax_range: float =1.0, density: bool =False, outliers: bool =True, **kwargs) -> Union[int, str]:
    """ Adds a 2d histogram series.
    Args:
        x (Any):
        y (Any):
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        show (bool, optional): Attempt to render widget.
        xbins (int, optional):
        ybins (int, optional):
        xmin_range (float, optional):
        xmax_range (float, optional):
        ymin_range (float, optional):
        ymax_range (float, optional):
        density (bool, optional):
        outliers (bool, optional):
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs.keys():
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag=kwargs['id']
    return internal_dpg.add_2d_histogram_series(x, y, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, source=source, show=show, xbins=xbins, ybins=ybins, xmin_range=xmin_range, xmax_range=xmax_range, ymin_range=ymin_range, ymax_range=ymax_range, density=density, outliers=outliers, **kwargs)
def add_3d_slider(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: Union[List[float], Tuple[float, ...]] =(0.0, 0.0, 0.0, 0.0), max_x: float =100.0, max_y: float =100.0, max_z: float =100.0, min_x: float =0.0, min_y: float =0.0, min_z: float =0.0, scale: float =1.0, **kwargs) -> Union[int, str]:
    """ Adds a 3D box slider.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        default_value (Union[List[float], Tuple[float, ...]], optional):
        max_x (float, optional): Applies upper limit to slider.
        max_y (float, optional): Applies upper limit to slider.
        max_z (float, optional): Applies upper limit to slider.
        min_x (float, optional): Applies lower limit to slider.
        min_y (float, optional): Applies lower limit to slider.
        min_z (float, optional): Applies lower limit to slider.
        scale (float, optional): Size of the widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs.keys():
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag=kwargs['id']
    return internal_dpg.add_3d_slider(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, max_x=max_x, max_y=max_y, max_z=max_z, min_x=min_x, min_y=min_y, min_z=min_z, scale=scale, **kwargs)
def add_alias(alias : str, item : Union[int, str], **kwargs) -> None:
    """ Registers the string *alias* as an alternate handle for *item*.
    Args:
        alias (str): alias string to register.
        item (Union[int, str]): item id the alias should resolve to.
    Returns:
        None
    """
    # Thin pass-through to the C-extension implementation.
    return internal_dpg.add_alias(alias, item, **kwargs)
def add_area_series(x : Union[List[float], Tuple[float, ...]], y : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, show: bool =True, fill: Union[List[int], Tuple[int, ...]] =(0, 0, 0, -255), contribute_to_bounds: bool =True, **kwargs) -> Union[int, str]:
    """ Creates an area series on a plot from the given x/y data.
    Args:
        x (Any): x coordinates of the series.
        y (Any): y coordinates of the series.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data made available to callbacks.
        use_internal_label (bool, optional): Use the generated internal label (appends ### uuid) instead of the user-specified one.
        tag (Union[int, str], optional): Unique id used to refer to the item programmatically; doubles as the label when label is unset.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        show (bool, optional): Attempt to render widget.
        fill (Union[List[int], Tuple[int, ...]], optional): fill color of the area.
        contribute_to_bounds (bool, optional): whether the series affects plot bounds.
        id (Union[int, str], optional): (deprecated) use 'tag' instead.
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: the old 'id' keyword maps onto 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, stacklevel=2)
        tag = kwargs['id']
    return internal_dpg.add_area_series(x, y, tag=tag, parent=parent, before=before, source=source, show=show, fill=fill, contribute_to_bounds=contribute_to_bounds, label=label, user_data=user_data, use_internal_label=use_internal_label, **kwargs)
def add_bar_series(x : Union[List[float], Tuple[float, ...]], y : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, show: bool =True, weight: float =1.0, horizontal: bool =False, **kwargs) -> Union[int, str]:
    """ Creates a bar series on a plot from the given x/y data.
    Args:
        x (Any): x coordinates of the series.
        y (Any): y coordinates of the series.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data made available to callbacks.
        use_internal_label (bool, optional): Use the generated internal label (appends ### uuid) instead of the user-specified one.
        tag (Union[int, str], optional): Unique id used to refer to the item programmatically; doubles as the label when label is unset.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        show (bool, optional): Attempt to render widget.
        weight (float, optional): bar width.
        horizontal (bool, optional): render bars horizontally.
        id (Union[int, str], optional): (deprecated) use 'tag' instead.
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: the old 'id' keyword maps onto 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, stacklevel=2)
        tag = kwargs['id']
    return internal_dpg.add_bar_series(x, y, tag=tag, parent=parent, before=before, source=source, show=show, weight=weight, horizontal=horizontal, label=label, user_data=user_data, use_internal_label=use_internal_label, **kwargs)
def add_bool_value(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, source: Union[int, str] =0, default_value: bool =False, parent: Union[int, str] =internal_dpg.mvReservedUUID_3, **kwargs) -> Union[int, str]:
    """ Creates a bool value in the value registry.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data made available to callbacks.
        use_internal_label (bool, optional): Use the generated internal label (appends ### uuid) instead of the user-specified one.
        tag (Union[int, str], optional): Unique id used to refer to the item programmatically; doubles as the label when label is unset.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        default_value (bool, optional): initial stored value.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        id (Union[int, str], optional): (deprecated) use 'tag' instead.
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: the old 'id' keyword maps onto 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, stacklevel=2)
        tag = kwargs['id']
    return internal_dpg.add_bool_value(tag=tag, source=source, default_value=default_value, parent=parent, label=label, user_data=user_data, use_internal_label=use_internal_label, **kwargs)
def add_button(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, small: bool =False, arrow: bool =False, direction: int =0, **kwargs) -> Union[int, str]:
    """ Adds a button.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        small (bool, optional): Shrinks the size of the button to the text of the label it contains. Useful for embedding in text.
        arrow (bool, optional): Displays an arrow in place of the text string. This requires the direction keyword.
        direction (int, optional): Sets the cardinal direction for the arrow buy using constants mvDir_Left, mvDir_Up, mvDir_Down, mvDir_Right, mvDir_None. Arrow keyword must be set to True.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs.keys():
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag=kwargs['id']
    return internal_dpg.add_button(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, small=small, arrow=arrow, direction=direction, **kwargs)
def add_candle_series(dates : Union[List[float], Tuple[float, ...]], opens : Union[List[float], Tuple[float, ...]], closes : Union[List[float], Tuple[float, ...]], lows : Union[List[float], Tuple[float, ...]], highs : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, show: bool =True, bull_color: Union[List[int], Tuple[int, ...]] =(0, 255, 113, 255), bear_color: Union[List[int], Tuple[int, ...]] =(218, 13, 79, 255), weight: float =0.25, tooltip: bool =True, **kwargs) -> Union[int, str]:
    """ Adds a candle series to a plot.
    Args:
        dates (Any):
        opens (Any):
        closes (Any):
        lows (Any):
        highs (Any):
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        show (bool, optional): Attempt to render widget.
        bull_color (Union[List[int], Tuple[int, ...]], optional):
        bear_color (Union[List[int], Tuple[int, ...]], optional):
        weight (float, optional):
        tooltip (bool, optional):
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs.keys():
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag=kwargs['id']
    return internal_dpg.add_candle_series(dates, opens, closes, lows, highs, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, source=source, show=show, bull_color=bull_color, bear_color=bear_color, weight=weight, tooltip=tooltip, **kwargs)
def add_char_remap(source : int, target : int, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, **kwargs) -> Union[int, str]:
    """ Remaps the character code *source* to *target*.
    Args:
        source (int): character code to remap.
        target (int): character code to substitute.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data made available to callbacks.
        use_internal_label (bool, optional): Use the generated internal label (appends ### uuid) instead of the user-specified one.
        tag (Union[int, str], optional): Unique id used to refer to the item programmatically; doubles as the label when label is unset.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        id (Union[int, str], optional): (deprecated) use 'tag' instead.
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: the old 'id' keyword maps onto 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, stacklevel=2)
        tag = kwargs['id']
    return internal_dpg.add_char_remap(source, target, tag=tag, parent=parent, label=label, user_data=user_data, use_internal_label=use_internal_label, **kwargs)
def add_checkbox(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: bool =False, **kwargs) -> Union[int, str]:
    """ Adds a checkbox.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        default_value (bool, optional): Sets the default value of the checkmark
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs:
        # Deprecated alias: accept 'id' as 'tag' but warn. The key deliberately
        # stays in **kwargs for the forwarded call (original pass-through behavior).
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_checkbox(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, **kwargs)
def add_child_window(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, border: bool =True, autosize_x: bool =False, autosize_y: bool =False, no_scrollbar: bool =False, horizontal_scrollbar: bool =False, menubar: bool =False, **kwargs) -> Union[int, str]:
    """ Adds an embedded child window. Will show scrollbars when items do not fit.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        border (bool, optional): Shows/Hides the border around the sides.
        autosize_x (bool, optional): Autosize the window to its parents size in x.
        autosize_y (bool, optional): Autosize the window to its parents size in y.
        no_scrollbar (bool, optional): Disable scrollbars (window can still scroll with mouse or programmatically).
        horizontal_scrollbar (bool, optional): Allow horizontal scrollbar to appear (off by default).
        menubar (bool, optional): Shows/Hides the menubar at the top.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs:
        # Deprecated alias: accept 'id' as 'tag' but warn. The key deliberately
        # stays in **kwargs for the forwarded call (original pass-through behavior).
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_child_window(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, payload_type=payload_type, drop_callback=drop_callback, show=show, pos=pos, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, border=border, autosize_x=autosize_x, autosize_y=autosize_y, no_scrollbar=no_scrollbar, horizontal_scrollbar=horizontal_scrollbar, menubar=menubar, **kwargs)
def add_clipper(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, delay_search: bool =False, **kwargs) -> Union[int, str]:
    """ Helper to manually clip large list of items. Increases performance by not searching or drawing widgets outside of the clipped region.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        width (int, optional): Width of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs:
        # Deprecated alias: accept 'id' as 'tag' but warn. The key deliberately
        # stays in **kwargs for the forwarded call (original pass-through behavior).
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_clipper(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, indent=indent, parent=parent, before=before, show=show, delay_search=delay_search, **kwargs)
def add_collapsing_header(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, closable: bool =False, default_open: bool =False, open_on_double_click: bool =False, open_on_arrow: bool =False, leaf: bool =False, bullet: bool =False, **kwargs) -> Union[int, str]:
    """ Adds a collapsing header to add items to. Must be closed with the end command.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        closable (bool, optional): Adds the ability to hide this widget by pressing the (x) in the top right of widget.
        default_open (bool, optional): Sets the collapsible header open by default.
        open_on_double_click (bool, optional): Need double-click to open node.
        open_on_arrow (bool, optional): Only open when clicking on the arrow part.
        leaf (bool, optional): No collapsing, no arrow (use as a convenience for leaf nodes).
        bullet (bool, optional): Display a bullet instead of arrow.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs:
        # Deprecated alias: accept 'id' as 'tag' but warn. The key deliberately
        # stays in **kwargs for the forwarded call (original pass-through behavior).
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_collapsing_header(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, indent=indent, parent=parent, before=before, payload_type=payload_type, drag_callback=drag_callback, drop_callback=drop_callback, show=show, pos=pos, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, closable=closable, default_open=default_open, open_on_double_click=open_on_double_click, open_on_arrow=open_on_arrow, leaf=leaf, bullet=bullet, **kwargs)
def add_color_button(default_value : Union[List[int], Tuple[int, ...]] =(0, 0, 0, 255), *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, no_alpha: bool =False, no_border: bool =False, no_drag_drop: bool =False, **kwargs) -> Union[int, str]:
    """ Adds a color button.
    Args:
        default_value (Union[List[int], Tuple[int, ...]], optional):
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        no_alpha (bool, optional): Removes the displayed slider that can change alpha channel.
        no_border (bool, optional): Disable border around the image.
        no_drag_drop (bool, optional): Disable ability to drag and drop small preview (color square) to apply colors to other items.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs:
        # Deprecated alias: accept 'id' as 'tag' but warn. The key deliberately
        # stays in **kwargs for the forwarded call (original pass-through behavior).
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_color_button(default_value, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, no_alpha=no_alpha, no_border=no_border, no_drag_drop=no_drag_drop, **kwargs)
def add_color_edit(default_value : Union[List[int], Tuple[int, ...]] =(0, 0, 0, 255), *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, no_alpha: bool =False, no_picker: bool =False, no_options: bool =False, no_small_preview: bool =False, no_inputs: bool =False, no_tooltip: bool =False, no_label: bool =False, no_drag_drop: bool =False, alpha_bar: bool =False, alpha_preview: int =0, display_mode: int =1048576, display_type: int =8388608, input_mode: int =134217728, **kwargs) -> Union[int, str]:
    """ Adds an RGBA color editor. Left clicking the small color preview will provide a color picker. Click and dragging the small color preview will copy the color to be applied on any other color widget.
    Args:
        default_value (Union[List[int], Tuple[int, ...]], optional):
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        no_alpha (bool, optional): Removes the displayed slider that can change alpha channel.
        no_picker (bool, optional): Disable picker popup when color square is clicked.
        no_options (bool, optional): Disable toggling options menu when right-clicking on inputs/small preview.
        no_small_preview (bool, optional): Disable colored square preview next to the inputs. (e.g. to show only the inputs). This only displays if the side preview is not shown.
        no_inputs (bool, optional): Disable inputs sliders/text widgets. (e.g. to show only the small preview colored square)
        no_tooltip (bool, optional): Disable tooltip when hovering the preview.
        no_label (bool, optional): Disable display of inline text label.
        no_drag_drop (bool, optional): Disable ability to drag and drop small preview (color square) to apply colors to other items.
        alpha_bar (bool, optional): Show vertical alpha bar/gradient in picker.
        alpha_preview (int, optional): mvColorEdit_AlphaPreviewNone, mvColorEdit_AlphaPreview, or mvColorEdit_AlphaPreviewHalf
        display_mode (int, optional): mvColorEdit_rgb, mvColorEdit_hsv, or mvColorEdit_hex
        display_type (int, optional): mvColorEdit_uint8 or mvColorEdit_float
        input_mode (int, optional): mvColorEdit_input_rgb or mvColorEdit_input_hsv
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs:
        # Deprecated alias: accept 'id' as 'tag' but warn. The key deliberately
        # stays in **kwargs for the forwarded call (original pass-through behavior).
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_color_edit(default_value, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, no_alpha=no_alpha, no_picker=no_picker, no_options=no_options, no_small_preview=no_small_preview, no_inputs=no_inputs, no_tooltip=no_tooltip, no_label=no_label, no_drag_drop=no_drag_drop, alpha_bar=alpha_bar, alpha_preview=alpha_preview, display_mode=display_mode, display_type=display_type, input_mode=input_mode, **kwargs)
def add_color_picker(default_value : Union[List[int], Tuple[int, ...]] =(0, 0, 0, 255), *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, no_alpha: bool =False, no_side_preview: bool =False, no_small_preview: bool =False, no_inputs: bool =False, no_tooltip: bool =False, no_label: bool =False, alpha_bar: bool =False, display_rgb: bool =False, display_hsv: bool =False, display_hex: bool =False, picker_mode: int =33554432, alpha_preview: int =0, display_type: int =8388608, input_mode: int =134217728, **kwargs) -> Union[int, str]:
    """ Adds an RGB color picker. Right click the color picker for options. Click and drag the color preview to copy the color and drop on any other color widget to apply. Right Click allows the style of the color picker to be changed.
    Args:
        default_value (Union[List[int], Tuple[int, ...]], optional):
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        no_alpha (bool, optional): Removes the displayed slider that can change alpha channel.
        no_side_preview (bool, optional): Disable bigger color preview on right side of the picker, use small colored square preview instead , unless small preview is also hidden.
        no_small_preview (bool, optional): Disable colored square preview next to the inputs. (e.g. to show only the inputs). This only displays if the side preview is not shown.
        no_inputs (bool, optional): Disable inputs sliders/text widgets. (e.g. to show only the small preview colored square)
        no_tooltip (bool, optional): Disable tooltip when hovering the preview.
        no_label (bool, optional): Disable display of inline text label.
        alpha_bar (bool, optional): Show vertical alpha bar/gradient in picker.
        display_rgb (bool, optional): Override _display_ type among RGB/HSV/Hex.
        display_hsv (bool, optional): Override _display_ type among RGB/HSV/Hex.
        display_hex (bool, optional): Override _display_ type among RGB/HSV/Hex.
        picker_mode (int, optional): mvColorPicker_bar or mvColorPicker_wheel
        alpha_preview (int, optional): mvColorEdit_AlphaPreviewNone, mvColorEdit_AlphaPreview, or mvColorEdit_AlphaPreviewHalf
        display_type (int, optional): mvColorEdit_uint8 or mvColorEdit_float
        input_mode (int, optional): mvColorEdit_input_rgb or mvColorEdit_input_hsv
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs:
        # Deprecated alias: accept 'id' as 'tag' but warn. The key deliberately
        # stays in **kwargs for the forwarded call (original pass-through behavior).
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_color_picker(default_value, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, no_alpha=no_alpha, no_side_preview=no_side_preview, no_small_preview=no_small_preview, no_inputs=no_inputs, no_tooltip=no_tooltip, no_label=no_label, alpha_bar=alpha_bar, display_rgb=display_rgb, display_hsv=display_hsv, display_hex=display_hex, picker_mode=picker_mode, alpha_preview=alpha_preview, display_type=display_type, input_mode=input_mode, **kwargs)
def add_color_value(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, source: Union[int, str] =0, default_value: Union[List[float], Tuple[float, ...]] =(0.0, 0.0, 0.0, 0.0), parent: Union[int, str] =internal_dpg.mvReservedUUID_3, **kwargs) -> Union[int, str]:
    """ Adds a color value.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        default_value (Union[List[float], Tuple[float, ...]], optional):
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs:
        # Deprecated alias: accept 'id' as 'tag' but warn. The key deliberately
        # stays in **kwargs for the forwarded call (original pass-through behavior).
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_color_value(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, source=source, default_value=default_value, parent=parent, **kwargs)
def add_colormap(colors : List[Union[List[int], Tuple[int, ...]]], qualitative : bool, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, show: bool =True, parent: Union[int, str] =internal_dpg.mvReservedUUID_4, **kwargs) -> Union[int, str]:
    """ Adds a legend that pairs colors with normalized value 0.0->1.0. This is typically used with a heat series. (ex. [[0, 0, 0, 255], [255, 255, 255, 255]] will be mapped to a soft transition from 0.0-1.0)
    Args:
        colors (Any): colors that will be mapped to the normalized value 0.0->1.0
        qualitative (bool): Qualitative will create hard transitions for color boundaries across the value range when enabled.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        show (bool, optional): Attempt to render widget.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs:
        # Deprecated alias: accept 'id' as 'tag' but warn. The key deliberately
        # stays in **kwargs for the forwarded call (original pass-through behavior).
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_colormap(colors, qualitative, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, show=show, parent=parent, **kwargs)
def add_colormap_button(default_value : Union[List[int], Tuple[int, ...]] =(0, 0, 0, 255), *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, **kwargs) -> Union[int, str]:
    """ Adds a button that a color map can be bound to.
    Args:
        default_value (Union[List[int], Tuple[int, ...]], optional):
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs:
        # Deprecated alias: accept 'id' as 'tag' but warn. The key deliberately
        # stays in **kwargs for the forwarded call (original pass-through behavior).
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_colormap_button(default_value, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, **kwargs)
def add_colormap_registry(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, show: bool =False, **kwargs) -> Union[int, str]:
    """ Adds a colormap registry.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs:
        # Deprecated alias: accept 'id' as 'tag' but warn. The key deliberately
        # stays in **kwargs for the forwarded call (original pass-through behavior).
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_colormap_registry(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, show=show, **kwargs)
def add_colormap_scale(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], colormap: Union[int, str] =0, min_scale: float =0.0, max_scale: float =1.0, **kwargs) -> Union[int, str]:
    """ Adds a legend that pairs values with colors. This is typically used with a heat series. 
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        colormap (Union[int, str], optional): mvPlotColormap_* constants or mvColorMap uuid from a color map registry
        min_scale (float, optional): Sets the min number of the color scale. Typically is the same as the min scale from the heat series.
        max_scale (float, optional): Sets the max number of the color scale. Typically is the same as the max scale from the heat series.
        id (Union[int, str], optional): (deprecated) 
        drag_callback (Callable, optional): (deprecated) 
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: honor the deprecated 'id' keyword as 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    # 'drag_callback' was removed for this item type; warn and drop it so it
    # is not forwarded to internal_dpg.
    if 'drag_callback' in kwargs:
        warnings.warn('drag_callback keyword removed', DeprecationWarning, 2)
        kwargs.pop('drag_callback', None)
    return internal_dpg.add_colormap_scale(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, drop_callback=drop_callback, show=show, pos=pos, colormap=colormap, min_scale=min_scale, max_scale=max_scale, **kwargs)
def add_colormap_slider(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: float =0.0, **kwargs) -> Union[int, str]:
    """ Adds a color slider that a color map can be bound to.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        default_value (float, optional): 
        id (Union[int, str], optional): (deprecated) 
        drag_callback (Callable, optional): (deprecated) 
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: honor the deprecated 'id' keyword as 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    # 'drag_callback' was removed for this item type; warn and drop it so it
    # is not forwarded to internal_dpg.
    if 'drag_callback' in kwargs:
        warnings.warn('drag_callback keyword removed', DeprecationWarning, 2)
        kwargs.pop('drag_callback', None)
    return internal_dpg.add_colormap_slider(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, payload_type=payload_type, callback=callback, drop_callback=drop_callback, show=show, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, **kwargs)
def add_combo(items : Union[List[str], Tuple[str, ...]] =(), *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: str ='', popup_align_left: bool =False, no_arrow_button: bool =False, no_preview: bool =False, height_mode: int =1, **kwargs) -> Union[int, str]:
    """ Adds a combo dropdown that allows a user to select a single option from a drop down window. All items will be shown as selectables on the dropdown.
    Args:
        items (Union[List[str], Tuple[str, ...]], optional): A tuple of items to be shown in the drop down window. Can consist of any combination of types but will convert all items to strings to be shown.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        default_value (str, optional): Sets a selected item from the drop down by specifying the string value.
        popup_align_left (bool, optional): Align the contents on the popup toward the left.
        no_arrow_button (bool, optional): Display the preview box without the square arrow button indicating dropdown activity.
        no_preview (bool, optional): Display only the square arrow button and not the selected value.
        height_mode (int, optional): Controlls the number of items shown in the dropdown by the constants mvComboHeight_Small, mvComboHeight_Regular, mvComboHeight_Large, mvComboHeight_Largest
        id (Union[int, str], optional): (deprecated) 
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: honor the deprecated 'id' keyword as 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_combo(items, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, popup_align_left=popup_align_left, no_arrow_button=no_arrow_button, no_preview=no_preview, height_mode=height_mode, **kwargs)
def add_date_picker(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: dict ={'month_day': 14, 'year':20, 'month':5}, level: int =0, **kwargs) -> Union[int, str]:
    """ Adds a data picker.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        default_value (dict, optional): 
        level (int, optional): Use avaliable constants. mvDatePickerLevel_Day, mvDatePickerLevel_Month, mvDatePickerLevel_Year
        id (Union[int, str], optional): (deprecated) 
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: honor the deprecated 'id' keyword as 'tag'.
    # NOTE(review): default_value is a mutable dict default shared across calls;
    # harmless only if internal_dpg never mutates it — confirm upstream.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_date_picker(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, indent=indent, parent=parent, before=before, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, level=level, **kwargs)
def add_double4_value(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, source: Union[int, str] =0, default_value: Any =(0.0, 0.0, 0.0, 0.0), parent: Union[int, str] =internal_dpg.mvReservedUUID_3, **kwargs) -> Union[int, str]:
    """ Adds a double value.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        default_value (Any, optional): 
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        id (Union[int, str], optional): (deprecated) 
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: honor the deprecated 'id' keyword as 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_double4_value(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, source=source, default_value=default_value, parent=parent, **kwargs)
def add_double_value(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, source: Union[int, str] =0, default_value: float =0.0, parent: Union[int, str] =internal_dpg.mvReservedUUID_3, **kwargs) -> Union[int, str]:
    """ Adds a double value.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        default_value (float, optional): 
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        id (Union[int, str], optional): (deprecated) 
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: honor the deprecated 'id' keyword as 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_double_value(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, source=source, default_value=default_value, parent=parent, **kwargs)
def add_drag_float(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: float =0.0, format: str ='%0.3f', speed: float =1.0, min_value: float =0.0, max_value: float =100.0, no_input: bool =False, clamped: bool =False, **kwargs) -> Union[int, str]:
    """ Adds drag for a single float value. Directly entry can be done with double click or CTRL+Click. Min and Max alone are a soft limit for the drag. Use clamped keyword to also apply limits to the direct entry modes.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        default_value (float, optional): 
        format (str, optional): Determines the format the float will be displayed as use python string formatting.
        speed (float, optional): Sets the sensitivity the float will be modified while dragging.
        min_value (float, optional): Applies a limit only to draging entry only.
        max_value (float, optional): Applies a limit only to draging entry only.
        no_input (bool, optional): Disable direct entry methods or Enter key allowing to input text directly into the widget.
        clamped (bool, optional): Applies the min and max limits to direct entry methods also such as double click and CTRL+Click.
        id (Union[int, str], optional): (deprecated) 
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: honor the deprecated 'id' keyword as 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_drag_float(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, format=format, speed=speed, min_value=min_value, max_value=max_value, no_input=no_input, clamped=clamped, **kwargs)
def add_drag_floatx(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: Union[List[float], Tuple[float, ...]] =(0.0, 0.0, 0.0, 0.0), size: int =4, format: str ='%0.3f', speed: float =1.0, min_value: float =0.0, max_value: float =100.0, no_input: bool =False, clamped: bool =False, **kwargs) -> Union[int, str]:
    """ Adds drag input for a set of float values up to 4. Directly entry can be done with double click or CTRL+Click. Min and Max alone are a soft limit for the drag. Use clamped keyword to also apply limits to the direct entry modes.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        default_value (Union[List[float], Tuple[float, ...]], optional): 
        size (int, optional): Number of floats to be displayed.
        format (str, optional): Determines the format the float will be displayed as use python string formatting.
        speed (float, optional): Sets the sensitivity the float will be modified while dragging.
        min_value (float, optional): Applies a limit only to draging entry only.
        max_value (float, optional): Applies a limit only to draging entry only.
        no_input (bool, optional): Disable direct entry methods or Enter key allowing to input text directly into the widget.
        clamped (bool, optional): Applies the min and max limits to direct entry methods also such as double click and CTRL+Click.
        id (Union[int, str], optional): (deprecated) 
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: honor the deprecated 'id' keyword as 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_drag_floatx(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, size=size, format=format, speed=speed, min_value=min_value, max_value=max_value, no_input=no_input, clamped=clamped, **kwargs)
def add_drag_int(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: int =0, format: str ='%d', speed: float =1.0, min_value: int =0, max_value: int =100, no_input: bool =False, clamped: bool =False, **kwargs) -> Union[int, str]:
    """ Adds drag for a single int value. Directly entry can be done with double click or CTRL+Click. Min and Max alone are a soft limit for the drag. Use clamped keyword to also apply limits to the direct entry modes.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        default_value (int, optional): 
        format (str, optional): Determines the format the float will be displayed as use python string formatting.
        speed (float, optional): Sets the sensitivity the float will be modified while dragging.
        min_value (int, optional): Applies a limit only to draging entry only.
        max_value (int, optional): Applies a limit only to draging entry only.
        no_input (bool, optional): Disable direct entry methods or Enter key allowing to input text directly into the widget.
        clamped (bool, optional): Applies the min and max limits to direct entry methods also such as double click and CTRL+Click.
        id (Union[int, str], optional): (deprecated) 
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: honor the deprecated 'id' keyword as 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_drag_int(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, format=format, speed=speed, min_value=min_value, max_value=max_value, no_input=no_input, clamped=clamped, **kwargs)
def add_drag_intx(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: Union[List[int], Tuple[int, ...]] =(0, 0, 0, 0), size: int =4, format: str ='%d', speed: float =1.0, min_value: int =0, max_value: int =100, no_input: bool =False, clamped: bool =False, **kwargs) -> Union[int, str]:
    """ Adds drag input for a set of int values up to 4. Directly entry can be done with double click or CTRL+Click. Min and Max alone are a soft limit for the drag. Use clamped keyword to also apply limits to the direct entry modes.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        default_value (Union[List[int], Tuple[int, ...]], optional): 
        size (int, optional): Number of ints to be displayed.
        format (str, optional): Determines the format the int will be displayed as use python string formatting.
        speed (float, optional): Sets the sensitivity the float will be modified while dragging.
        min_value (int, optional): Applies a limit only to draging entry only.
        max_value (int, optional): Applies a limit only to draging entry only.
        no_input (bool, optional): Disable direct entry methods or Enter key allowing to input text directly into the widget.
        clamped (bool, optional): Applies the min and max limits to direct entry methods also such as double click and CTRL+Click.
        id (Union[int, str], optional): (deprecated) 
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: honor the deprecated 'id' keyword as 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_drag_intx(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, size=size, format=format, speed=speed, min_value=min_value, max_value=max_value, no_input=no_input, clamped=clamped, **kwargs)
def add_drag_line(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, callback: Callable =None, show: bool =True, default_value: Any =0.0, color: Union[List[int], Tuple[int, ...]] =(0, 0, 0, -255), thickness: float =1.0, show_label: bool =True, vertical: bool =True, **kwargs) -> Union[int, str]:
    """ Adds a drag line to a plot.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        default_value (Any, optional): 
        color (Union[List[int], Tuple[int, ...]], optional): 
        thickness (float, optional): 
        show_label (bool, optional): 
        vertical (bool, optional): 
        id (Union[int, str], optional): (deprecated) 
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: honor the deprecated 'id' keyword as 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_drag_line(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, source=source, callback=callback, show=show, default_value=default_value, color=color, thickness=thickness, show_label=show_label, vertical=vertical, **kwargs)
def add_drag_payload(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, show: bool =True, drag_data: Any =None, drop_data: Any =None, payload_type: str ='$$DPG_PAYLOAD', **kwargs) -> Union[int, str]:
	""" User data payload for drag and drop operations.
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		show (bool, optional): Attempt to render widget.
		drag_data (Any, optional): Drag data
		drop_data (Any, optional): Drop data
		payload_type (str, optional): 
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_drag_payload(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, show=show, drag_data=drag_data, drop_data=drop_data, payload_type=payload_type, **kwargs)
def add_drag_point(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, callback: Callable =None, show: bool =True, default_value: Any =(0.0, 0.0), color: Union[List[int], Tuple[int, ...]] =(0, 0, 0, -255), thickness: float =1.0, show_label: bool =True, **kwargs) -> Union[int, str]:
	""" Adds a drag point to a plot.
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		source (Union[int, str], optional): Overrides 'id' as value storage key.
		callback (Callable, optional): Registers a callback.
		show (bool, optional): Attempt to render widget.
		default_value (Any, optional): 
		color (Union[List[int], Tuple[int, ...]], optional): 
		thickness (float, optional): 
		show_label (bool, optional): 
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_drag_point(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, source=source, callback=callback, show=show, default_value=default_value, color=color, thickness=thickness, show_label=show_label, **kwargs)
def add_draw_layer(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, perspective_divide: bool =False, depth_clipping: bool =False, cull_mode: int =0, **kwargs) -> Union[int, str]:
	""" New in 1.1. Creates a layer useful for grouping drawlist items.
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		show (bool, optional): Attempt to render widget.
		perspective_divide (bool, optional): New in 1.1. apply perspective divide
		depth_clipping (bool, optional): New in 1.1. apply depth clipping
		cull_mode (int, optional): New in 1.1. culling mode, mvCullMode_* constants. Only works with triangles currently.
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_draw_layer(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, show=show, perspective_divide=perspective_divide, depth_clipping=depth_clipping, cull_mode=cull_mode, **kwargs)
def add_draw_node(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, **kwargs) -> Union[int, str]:
	""" New in 1.1. Creates a drawing node to associate a transformation matrix. Child node matricies will concatenate.
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		show (bool, optional): Attempt to render widget.
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_draw_node(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, show=show, **kwargs)
def add_drawlist(width : int, height : int, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, **kwargs) -> Union[int, str]:
	""" Adds a drawing canvas.
	Args:
		width (int): 
		height (int): 
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		callback (Callable, optional): Registers a callback.
		show (bool, optional): Attempt to render widget.
		pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
		filter_key (str, optional): Used by filter widget.
		delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
		tracked (bool, optional): Scroll tracking
		track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_drawlist(width, height, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, callback=callback, show=show, pos=pos, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, **kwargs)
def add_dynamic_texture(width : int, height : int, default_value : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =internal_dpg.mvReservedUUID_2, **kwargs) -> Union[int, str]:
	""" Adds a dynamic texture.
	Args:
		width (int): 
		height (int): 
		default_value (Union[List[float], Tuple[float, ...]]): 
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_dynamic_texture(width, height, default_value, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, **kwargs)
def add_error_series(x : Union[List[float], Tuple[float, ...]], y : Union[List[float], Tuple[float, ...]], negative : Union[List[float], Tuple[float, ...]], positive : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, show: bool =True, contribute_to_bounds: bool =True, horizontal: bool =False, **kwargs) -> Union[int, str]:
	""" Adds an error series to a plot.
	Args:
		x (Any): 
		y (Any): 
		negative (Any): 
		positive (Any): 
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		source (Union[int, str], optional): Overrides 'id' as value storage key.
		show (bool, optional): Attempt to render widget.
		contribute_to_bounds (bool, optional): 
		horizontal (bool, optional): 
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_error_series(x, y, negative, positive, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, source=source, show=show, contribute_to_bounds=contribute_to_bounds, horizontal=horizontal, **kwargs)
def add_file_dialog(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, callback: Callable =None, show: bool =True, default_path: str ='', default_filename: str ='.', file_count: int =0, modal: bool =False, directory_selector: bool =False, min_size: Union[List[int], Tuple[int, ...]] =[100, 100], max_size: Union[List[int], Tuple[int, ...]] =[30000, 30000], **kwargs) -> Union[int, str]:
	""" Displays a file or directory selector depending on keywords. Displays a file dialog by default. Callback will be ran when the file or directory picker is closed. The app_data arguemnt will be populated with information related to the file and directory as a dictionary.
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		width (int, optional): Width of the item.
		height (int, optional): Height of the item.
		callback (Callable, optional): Registers a callback.
		show (bool, optional): Attempt to render widget.
		default_path (str, optional): Path that the file dialog will default to when opened.
		default_filename (str, optional): Default name that will show in the file name input.
		file_count (int, optional): Number of visible files in the dialog.
		modal (bool, optional): Forces user interaction with the file selector.
		directory_selector (bool, optional): Shows only directory/paths as options. Allows selection of directory/paths only.
		min_size (Union[List[int], Tuple[int, ...]], optional): Minimum window size.
		max_size (Union[List[int], Tuple[int, ...]], optional): Maximum window size.
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_file_dialog(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, callback=callback, show=show, default_path=default_path, default_filename=default_filename, file_count=file_count, modal=modal, directory_selector=directory_selector, min_size=min_size, max_size=max_size, **kwargs)
def add_file_extension(extension : str, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, parent: Union[int, str] =0, before: Union[int, str] =0, custom_text: str ='', color: Union[List[int], Tuple[int, ...]] =(-255, 0, 0, 255), **kwargs) -> Union[int, str]:
	""" Creates a file extension filter option in the file dialog.
	Args:
		extension (str): Extension that will show as an when the parent is a file dialog.
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		width (int, optional): Width of the item.
		height (int, optional): Height of the item.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		custom_text (str, optional): Replaces the displayed text in the drop down for this extension.
		color (Union[List[int], Tuple[int, ...]], optional): Color for the text that will be shown with specified extensions.
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_file_extension(extension, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, parent=parent, before=before, custom_text=custom_text, color=color, **kwargs)
def add_filter_set(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, delay_search: bool =False, **kwargs) -> Union[int, str]:
	""" Helper to parse and apply text filters (e.g. aaaaa[, bbbbb][, ccccc])
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		width (int, optional): Width of the item.
		indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		show (bool, optional): Attempt to render widget.
		delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_filter_set(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, indent=indent, parent=parent, before=before, show=show, delay_search=delay_search, **kwargs)
def add_float4_value(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, source: Union[int, str] =0, default_value: Union[List[float], Tuple[float, ...]] =(0.0, 0.0, 0.0, 0.0), parent: Union[int, str] =internal_dpg.mvReservedUUID_3, **kwargs) -> Union[int, str]:
	""" Adds a float4 value.
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		source (Union[int, str], optional): Overrides 'id' as value storage key.
		default_value (Union[List[float], Tuple[float, ...]], optional): 
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_float4_value(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, source=source, default_value=default_value, parent=parent, **kwargs)
def add_float_value(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, source: Union[int, str] =0, default_value: float =0.0, parent: Union[int, str] =internal_dpg.mvReservedUUID_3, **kwargs) -> Union[int, str]:
	""" Adds a float value.
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		source (Union[int, str], optional): Overrides 'id' as value storage key.
		default_value (float, optional): 
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_float_value(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, source=source, default_value=default_value, parent=parent, **kwargs)
def add_float_vect_value(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, source: Union[int, str] =0, default_value: Union[List[float], Tuple[float, ...]] =(), parent: Union[int, str] =internal_dpg.mvReservedUUID_3, **kwargs) -> Union[int, str]:
	""" Adds a float vect value.
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		source (Union[int, str], optional): Overrides 'id' as value storage key.
		default_value (Union[List[float], Tuple[float, ...]], optional): 
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_float_vect_value(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, source=source, default_value=default_value, parent=parent, **kwargs)
def add_font(file : str, size : int, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =internal_dpg.mvReservedUUID_0, **kwargs) -> Union[int, str]:
	""" Adds font to a font registry.
	Args:
		file (str): 
		size (int): 
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		id (Union[int, str], optional): (deprecated) 
		default_font (bool, optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	# 'default_font' was removed entirely; strip it so it is not forwarded.
	if 'default_font' in kwargs:
		warnings.warn('default_font keyword removed', DeprecationWarning, 2)
		kwargs.pop('default_font', None)
	return internal_dpg.add_font(file, size, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, **kwargs)
def add_font_chars(chars : Union[List[int], Tuple[int, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, **kwargs) -> Union[int, str]:
	""" Adds specific font characters to a font.
	Args:
		chars (Union[List[int], Tuple[int, ...]]): 
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_font_chars(chars, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, **kwargs)
def add_font_range(first_char : int, last_char : int, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, **kwargs) -> Union[int, str]:
	""" Adds a range of font characters to a font.
	Args:
		first_char (int): 
		last_char (int): 
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_font_range(first_char, last_char, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, **kwargs)
def add_font_range_hint(hint : int, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, **kwargs) -> Union[int, str]:
	""" Adds a range of font characters (mvFontRangeHint_ constants).
	Args:
		hint (int): 
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_font_range_hint(hint, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, **kwargs)
def add_font_registry(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, show: bool =True, **kwargs) -> Union[int, str]:
	""" Adds a font registry.
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		show (bool, optional): Attempt to render widget.
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_font_registry(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, show=show, **kwargs)
def add_group(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, horizontal: bool =False, horizontal_spacing: float =-1, xoffset: float =0.0, **kwargs) -> Union[int, str]:
	""" Creates a group that other widgets can belong to. The group allows item commands to be issued for all of its members.
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		width (int, optional): Width of the item.
		indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
		drag_callback (Callable, optional): Registers a drag callback for drag and drop.
		drop_callback (Callable, optional): Registers a drop callback for drag and drop.
		show (bool, optional): Attempt to render widget.
		pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
		filter_key (str, optional): Used by filter widget.
		delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
		tracked (bool, optional): Scroll tracking
		track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
		horizontal (bool, optional): Forces child widgets to be added in a horizontal layout.
		horizontal_spacing (float, optional): Spacing for the horizontal layout.
		xoffset (float, optional): Offset from containing window x item location within group.
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_group(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, indent=indent, parent=parent, before=before, payload_type=payload_type, drag_callback=drag_callback, drop_callback=drop_callback, show=show, pos=pos, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, horizontal=horizontal, horizontal_spacing=horizontal_spacing, xoffset=xoffset, **kwargs)
def add_handler_registry(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, show: bool =True, **kwargs) -> Union[int, str]:
	""" Adds a handler registry.
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		show (bool, optional): Attempt to render widget.
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_handler_registry(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, show=show, **kwargs)
def add_heat_series(x : Union[List[float], Tuple[float, ...]], rows : int, cols : int, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, show: bool =True, scale_min: float =0.0, scale_max: float =1.0, bounds_min: Any =(0.0, 0.0), bounds_max: Any =(1.0, 1.0), format: str ='%0.1f', contribute_to_bounds: bool =True, **kwargs) -> Union[int, str]:
	""" Adds a heat series to a plot.
	Args:
		x (Any): 
		rows (int): 
		cols (int): 
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		source (Union[int, str], optional): Overrides 'id' as value storage key.
		show (bool, optional): Attempt to render widget.
		scale_min (float, optional): Sets the color scale min. Typically paired with the color scale widget scale_min.
		scale_max (float, optional): Sets the color scale max. Typically paired with the color scale widget scale_max.
		bounds_min (Any, optional): 
		bounds_max (Any, optional): 
		format (str, optional): 
		contribute_to_bounds (bool, optional): 
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Backward compatibility: the deprecated 'id' keyword is mapped onto 'tag'
	# ('id' itself remains in kwargs and is forwarded unchanged).
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_heat_series(x, rows, cols, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, source=source, show=show, scale_min=scale_min, scale_max=scale_max, bounds_min=bounds_min, bounds_max=bounds_max, format=format, contribute_to_bounds=contribute_to_bounds, **kwargs)
def add_histogram_series(x : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, show: bool =True, bins: int =-1, bar_scale: float =1.0, min_range: float =0.0, max_range: float =1.0, cumlative: bool =False, density: bool =False, outliers: bool =True, contribute_to_bounds: bool =True, **kwargs) -> Union[int, str]:
    """ Adds a histogram series to a plot.

    Args:
        x (Any): Sample values to bin into the histogram.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        show (bool, optional): Attempt to render widget.
        bins (int, optional): Number of bins (-1 selects an automatic binning).
        bar_scale (float, optional): Scale factor applied to bar heights.
        min_range (float, optional): Lower bound of the binned range.
        max_range (float, optional): Upper bound of the binned range.
        cumlative (bool, optional): Render a cumulative histogram. (parameter name is an upstream typo kept for compatibility)
        density (bool, optional): Normalize counts to a probability density.
        outliers (bool, optional): Include values outside [min_range, max_range].
        contribute_to_bounds (bool, optional): Whether the series affects plot auto-fit bounds.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: accept the deprecated 'id' keyword as 'tag'.
    # Note 'id' is deliberately left in kwargs and forwarded to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_histogram_series(x, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, source=source, show=show, bins=bins, bar_scale=bar_scale, min_range=min_range, max_range=max_range, cumlative=cumlative, density=density, outliers=outliers, contribute_to_bounds=contribute_to_bounds, **kwargs)
def add_hline_series(x : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, show: bool =True, **kwargs) -> Union[int, str]:
    """ Adds an infinite horizontal line series to a plot.

    Args:
        x (Any): Y-axis positions at which to draw the horizontal lines.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: accept the deprecated 'id' keyword as 'tag'.
    # Note 'id' is deliberately left in kwargs and forwarded to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_hline_series(x, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, source=source, show=show, **kwargs)
def add_image(texture_tag : Union[int, str], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, tint_color: Union[List[float], Tuple[float, ...]] =(255, 255, 255, 255), border_color: Union[List[float], Tuple[float, ...]] =(0, 0, 0, 0), uv_min: Union[List[float], Tuple[float, ...]] =(0.0, 0.0), uv_max: Union[List[float], Tuple[float, ...]] =(1.0, 1.0), **kwargs) -> Union[int, str]:
    """ Adds an image from a specified texture. uv_min and uv_max represent the normalized texture coordinates of the original image that will be shown. Using range (0.0,0.0)->(1.0,1.0) for texture coordinates will generally display the entire texture.

    Args:
        texture_tag (Union[int, str]): The texture_tag should come from a texture that was added to a texture registry.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        tint_color (Union[List[float], Tuple[float, ...]], optional): Applies a color tint to the entire texture.
        border_color (Union[List[float], Tuple[float, ...]], optional): Displays a border of the specified color around the texture. If the theme style has turned off the border it will not be shown.
        uv_min (Union[List[float], Tuple[float, ...]], optional): Normalized texture coordinates min point.
        uv_max (Union[List[float], Tuple[float, ...]], optional): Normalized texture coordinates max point.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: accept the deprecated 'id' keyword as 'tag'.
    # Note 'id' is deliberately left in kwargs and forwarded to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_image(texture_tag, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, drag_callback=drag_callback, drop_callback=drop_callback, show=show, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, tint_color=tint_color, border_color=border_color, uv_min=uv_min, uv_max=uv_max, **kwargs)
def add_image_button(texture_tag : Union[int, str], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, frame_padding: int =-1, tint_color: Union[List[float], Tuple[float, ...]] =(255, 255, 255, 255), background_color: Union[List[float], Tuple[float, ...]] =(0, 0, 0, 0), uv_min: Union[List[float], Tuple[float, ...]] =(0.0, 0.0), uv_max: Union[List[float], Tuple[float, ...]] =(1.0, 1.0), **kwargs) -> Union[int, str]:
    """ Adds a button with a texture. uv_min and uv_max represent the normalized texture coordinates of the original image that will be shown. Using range (0.0,0.0)->(1.0,1.0) texture coordinates will generally display the entire texture

    Args:
        texture_tag (Union[int, str]): The texture_tag should come from a texture that was added to a texture registry.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        frame_padding (int, optional): Empty space around the outside of the texture. Button will show around the texture.
        tint_color (Union[List[float], Tuple[float, ...]], optional): Applies a color tint to the entire texture.
        background_color (Union[List[float], Tuple[float, ...]], optional): Displays a border of the specified color around the texture.
        uv_min (Union[List[float], Tuple[float, ...]], optional): Normalized texture coordinates min point.
        uv_max (Union[List[float], Tuple[float, ...]], optional): Normalized texture coordinates max point.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: accept the deprecated 'id' keyword as 'tag'.
    # Note 'id' is deliberately left in kwargs and forwarded to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_image_button(texture_tag, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, frame_padding=frame_padding, tint_color=tint_color, background_color=background_color, uv_min=uv_min, uv_max=uv_max, **kwargs)
def add_image_series(texture_tag : Union[int, str], bounds_min : Union[List[float], Tuple[float, ...]], bounds_max : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, show: bool =True, uv_min: Union[List[float], Tuple[float, ...]] =(0.0, 0.0), uv_max: Union[List[float], Tuple[float, ...]] =(1.0, 1.0), tint_color: Union[List[int], Tuple[int, ...]] =(255, 255, 255, 255), **kwargs) -> Union[int, str]:
    """ Adds an image series to a plot.

    Args:
        texture_tag (Union[int, str]): Texture (from a texture registry) to display.
        bounds_min (Any): Lower-left plot-space corner of the image.
        bounds_max (Any): Upper-right plot-space corner of the image.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        show (bool, optional): Attempt to render widget.
        uv_min (Union[List[float], Tuple[float, ...]], optional): normalized texture coordinates
        uv_max (Union[List[float], Tuple[float, ...]], optional): normalized texture coordinates
        tint_color (Union[List[int], Tuple[int, ...]], optional): Color tint applied to the image.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: accept the deprecated 'id' keyword as 'tag'.
    # Note 'id' is deliberately left in kwargs and forwarded to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_image_series(texture_tag, bounds_min, bounds_max, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, source=source, show=show, uv_min=uv_min, uv_max=uv_max, tint_color=tint_color, **kwargs)
def add_input_float(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: float =0.0, format: str ='%.3f', min_value: float =0.0, max_value: float =100.0, step: float =0.1, step_fast: float =1.0, min_clamped: bool =False, max_clamped: bool =False, on_enter: bool =False, readonly: bool =False, **kwargs) -> Union[int, str]:
    """ Adds input for an float. +/- buttons can be activated by setting the value of step.

    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        default_value (float, optional): Initial value of the input.
        format (str, optional): Determines the format the float will be displayed as use python string formatting.
        min_value (float, optional): Value for lower limit of input. By default this limits the step buttons. Use min_clamped to limit manual input.
        max_value (float, optional): Value for upper limit of input. By default this limits the step buttons. Use max_clamped to limit manual input.
        step (float, optional): Increment to change value by when the step buttons are pressed. Setting this to a value of 0 or smaller will turn off step buttons.
        step_fast (float, optional): After holding the step buttons for extended time the increments will switch to this value.
        min_clamped (bool, optional): Activates and deactivates the enforcement of min_value.
        max_clamped (bool, optional): Activates and deactivates the enforcement of max_value.
        on_enter (bool, optional): Only runs callback on enter key press.
        readonly (bool, optional): Activates read only mode where no text can be input but text can still be highlighted.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: accept the deprecated 'id' keyword as 'tag'.
    # Note 'id' is deliberately left in kwargs and forwarded to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_input_float(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, format=format, min_value=min_value, max_value=max_value, step=step, step_fast=step_fast, min_clamped=min_clamped, max_clamped=max_clamped, on_enter=on_enter, readonly=readonly, **kwargs)
def add_input_floatx(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: Union[List[float], Tuple[float, ...]] =(0.0, 0.0, 0.0, 0.0), format: str ='%.3f', min_value: float =0.0, max_value: float =100.0, size: int =4, min_clamped: bool =False, max_clamped: bool =False, on_enter: bool =False, readonly: bool =False, **kwargs) -> Union[int, str]:
    """ Adds multi float input for up to 4 float values.

    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        default_value (Union[List[float], Tuple[float, ...]], optional): Initial values of the cells.
        format (str, optional): Determines the format the float will be displayed as use python string formatting.
        min_value (float, optional): Value for lower limit of input for each cell. Use min_clamped to turn on.
        max_value (float, optional): Value for upper limit of input for each cell. Use max_clamped to turn on.
        size (int, optional): Number of components displayed for input.
        min_clamped (bool, optional): Activates and deactivates the enforcement of min_value.
        max_clamped (bool, optional): Activates and deactivates the enforcement of max_value.
        on_enter (bool, optional): Only runs callback on enter key press.
        readonly (bool, optional): Activates read only mode where no text can be input but text can still be highlighted.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: accept the deprecated 'id' keyword as 'tag'.
    # Note 'id' is deliberately left in kwargs and forwarded to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_input_floatx(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, format=format, min_value=min_value, max_value=max_value, size=size, min_clamped=min_clamped, max_clamped=max_clamped, on_enter=on_enter, readonly=readonly, **kwargs)
def add_input_int(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: int =0, min_value: int =0, max_value: int =100, step: int =1, step_fast: int =100, min_clamped: bool =False, max_clamped: bool =False, on_enter: bool =False, readonly: bool =False, **kwargs) -> Union[int, str]:
    """ Adds input for an int. +/- buttons can be activated by setting the value of step.

    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        default_value (int, optional): Initial value of the input.
        min_value (int, optional): Value for lower limit of input. By default this limits the step buttons. Use min_clamped to limit manual input.
        max_value (int, optional): Value for upper limit of input. By default this limits the step buttons. Use max_clamped to limit manual input.
        step (int, optional): Increment to change value by when the step buttons are pressed. Setting this to a value of 0 or smaller will turn off step buttons.
        step_fast (int, optional): After holding the step buttons for extended time the increments will switch to this value.
        min_clamped (bool, optional): Activates and deactivates the enforcement of min_value.
        max_clamped (bool, optional): Activates and deactivates the enforcement of max_value.
        on_enter (bool, optional): Only runs callback on enter key press.
        readonly (bool, optional): Activates read only mode where no text can be input but text can still be highlighted.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: accept the deprecated 'id' keyword as 'tag'.
    # Note 'id' is deliberately left in kwargs and forwarded to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_input_int(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, min_value=min_value, max_value=max_value, step=step, step_fast=step_fast, min_clamped=min_clamped, max_clamped=max_clamped, on_enter=on_enter, readonly=readonly, **kwargs)
def add_input_intx(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: Union[List[int], Tuple[int, ...]] =(0, 0, 0, 0), min_value: int =0, max_value: int =100, size: int =4, min_clamped: bool =False, max_clamped: bool =False, on_enter: bool =False, readonly: bool =False, **kwargs) -> Union[int, str]:
    """ Adds multi int input for up to 4 integer values.

    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        default_value (Union[List[int], Tuple[int, ...]], optional): Initial values of the cells.
        min_value (int, optional): Value for lower limit of input for each cell. Use min_clamped to turn on.
        max_value (int, optional): Value for upper limit of input for each cell. Use max_clamped to turn on.
        size (int, optional): Number of components displayed for input.
        min_clamped (bool, optional): Activates and deactivates the enforcement of min_value.
        max_clamped (bool, optional): Activates and deactivates the enforcement of max_value.
        on_enter (bool, optional): Only runs callback on enter.
        readonly (bool, optional): Activates read only mode where no text can be input but text can still be highlighted.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: accept the deprecated 'id' keyword as 'tag'.
    # Note 'id' is deliberately left in kwargs and forwarded to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_input_intx(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, min_value=min_value, max_value=max_value, size=size, min_clamped=min_clamped, max_clamped=max_clamped, on_enter=on_enter, readonly=readonly, **kwargs)
def add_input_text(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: str ='', hint: str ='', multiline: bool =False, no_spaces: bool =False, uppercase: bool =False, tab_input: bool =False, decimal: bool =False, hexadecimal: bool =False, readonly: bool =False, password: bool =False, scientific: bool =False, on_enter: bool =False, **kwargs) -> Union[int, str]:
    """ Adds input for text.

    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        default_value (str, optional): Initial text of the input.
        hint (str, optional): Displayed only when value is an empty string. Will reappear if input value is set to empty string. Will not show if default value is anything other than default empty string.
        multiline (bool, optional): Allows for multiline text input.
        no_spaces (bool, optional): Filter out spaces and tabs.
        uppercase (bool, optional): Automatically make all inputs uppercase.
        tab_input (bool, optional): Allows tabs to be input into the string value instead of changing item focus.
        decimal (bool, optional): Only allow characters 0123456789.+-*/
        hexadecimal (bool, optional): Only allow characters 0123456789ABCDEFabcdef
        readonly (bool, optional): Activates read only mode where no text can be input but text can still be highlighted.
        password (bool, optional): Display all input characters as '*'.
        scientific (bool, optional): Only allow characters 0123456789.+-*/eE (Scientific notation input)
        on_enter (bool, optional): Only runs callback on enter key press.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Backwards compatibility: accept the deprecated 'id' keyword as 'tag'.
    # Note 'id' is deliberately left in kwargs and forwarded to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_input_text(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, hint=hint, multiline=multiline, no_spaces=no_spaces, uppercase=uppercase, tab_input=tab_input, decimal=decimal, hexadecimal=hexadecimal, readonly=readonly, password=password, scientific=scientific, on_enter=on_enter, **kwargs)
def add_int4_value(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, source: Union[int, str] =0, default_value: Union[List[int], Tuple[int, ...]] =(0, 0, 0, 0), parent: Union[int, str] =internal_dpg.mvReservedUUID_3, **kwargs) -> Union[int, str]:
    """ Adds a int4 value.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        default_value (Union[List[int], Tuple[int, ...]], optional):
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_int4_value(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, source=source, default_value=default_value, parent=parent, **kwargs)
def add_int_value(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, source: Union[int, str] =0, default_value: int =0, parent: Union[int, str] =internal_dpg.mvReservedUUID_3, **kwargs) -> Union[int, str]:
    """ Adds a int value.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        default_value (int, optional):
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_int_value(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, source=source, default_value=default_value, parent=parent, **kwargs)
def add_item_activated_handler(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, callback: Callable =None, show: bool =True, **kwargs) -> Union[int, str]:
    """ Adds a activated handler.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_item_activated_handler(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, callback=callback, show=show, **kwargs)
def add_item_active_handler(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, callback: Callable =None, show: bool =True, **kwargs) -> Union[int, str]:
    """ Adds a active handler.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_item_active_handler(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, callback=callback, show=show, **kwargs)
def add_item_clicked_handler(button : int =-1, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, callback: Callable =None, show: bool =True, **kwargs) -> Union[int, str]:
    """ Adds a clicked handler.
    Args:
        button (int, optional): Submits callback for all mouse buttons
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_item_clicked_handler(button, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, callback=callback, show=show, **kwargs)
def add_item_deactivated_after_edit_handler(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, callback: Callable =None, show: bool =True, **kwargs) -> Union[int, str]:
    """ Adds a deactivated after edit handler.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_item_deactivated_after_edit_handler(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, callback=callback, show=show, **kwargs)
def add_item_deactivated_handler(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, callback: Callable =None, show: bool =True, **kwargs) -> Union[int, str]:
    """ Adds a deactivated handler.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_item_deactivated_handler(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, callback=callback, show=show, **kwargs)
def add_item_edited_handler(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, callback: Callable =None, show: bool =True, **kwargs) -> Union[int, str]:
    """ Adds an edited handler.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_item_edited_handler(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, callback=callback, show=show, **kwargs)
def add_item_focus_handler(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, callback: Callable =None, show: bool =True, **kwargs) -> Union[int, str]:
    """ Adds a focus handler.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_item_focus_handler(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, callback=callback, show=show, **kwargs)
def add_item_handler_registry(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, show: bool =True, **kwargs) -> Union[int, str]:
    """ Adds an item handler registry.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_item_handler_registry(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, show=show, **kwargs)
def add_item_hover_handler(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, callback: Callable =None, show: bool =True, **kwargs) -> Union[int, str]:
    """ Adds a hover handler.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_item_hover_handler(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, callback=callback, show=show, **kwargs)
def add_item_resize_handler(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, callback: Callable =None, show: bool =True, **kwargs) -> Union[int, str]:
    """ Adds a resize handler.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_item_resize_handler(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, callback=callback, show=show, **kwargs)
def add_item_toggled_open_handler(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, callback: Callable =None, show: bool =True, **kwargs) -> Union[int, str]:
    """ Adds a togged open handler.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_item_toggled_open_handler(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, callback=callback, show=show, **kwargs)
def add_item_visible_handler(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, callback: Callable =None, show: bool =True, **kwargs) -> Union[int, str]:
    """ Adds a visible handler.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_item_visible_handler(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, callback=callback, show=show, **kwargs)
def add_key_down_handler(key : int =-1, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, callback: Callable =None, show: bool =True, parent: Union[int, str] =internal_dpg.mvReservedUUID_1, **kwargs) -> Union[int, str]:
    """ Adds a key down handler.
    Args:
        key (int, optional): Submits callback for all keys
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_key_down_handler(key, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, callback=callback, show=show, parent=parent, **kwargs)
def add_key_press_handler(key : int =-1, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, callback: Callable =None, show: bool =True, parent: Union[int, str] =internal_dpg.mvReservedUUID_1, **kwargs) -> Union[int, str]:
    """ Adds a key press handler.
    Args:
        key (int, optional): Submits callback for all keys
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_key_press_handler(key, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, callback=callback, show=show, parent=parent, **kwargs)
def add_key_release_handler(key : int =-1, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, callback: Callable =None, show: bool =True, parent: Union[int, str] =internal_dpg.mvReservedUUID_1, **kwargs) -> Union[int, str]:
    """ Adds a key release handler.
    Args:
        key (int, optional): Submits callback for all keys
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_key_release_handler(key, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, callback=callback, show=show, parent=parent, **kwargs)
def add_knob_float(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =(), filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: float =0.0, min_value: float =0.0, max_value: float =100.0, **kwargs) -> Union[int, str]:
    """ Adds a knob that rotates based on change in x mouse position.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        default_value (float, optional):
        min_value (float, optional): Applies lower limit to value.
        max_value (float, optional): Applies upper limit to value.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # `pos` defaults to an empty tuple (not a mutable list) to avoid Python's
    # shared-mutable-default pitfall; the type hint already admits tuples.
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_knob_float(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, min_value=min_value, max_value=max_value, **kwargs)
def add_line_series(x : Union[List[float], Tuple[float, ...]], y : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, show: bool =True, **kwargs) -> Union[int, str]:
    """ Adds a line series to a plot.
    Args:
        x (Any):
        y (Any):
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_line_series(x, y, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, source=source, show=show, **kwargs)
def add_listbox(items : Union[List[str], Tuple[str, ...]] =(), *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =(), filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: str ='', num_items: int =3, **kwargs) -> Union[int, str]:
    """ Adds a listbox. If height is not large enough to show all items a scroll bar will appear.
    Args:
        items (Union[List[str], Tuple[str, ...]], optional): A tuple of items to be shown in the listbox. Can consist of any combination of types. All items will be displayed as strings.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        default_value (str, optional): String value fo the item that will be selected by default.
        num_items (int, optional): Expands the height of the listbox to show specified number of items.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # `pos` defaults to an empty tuple (not a mutable list) to avoid Python's
    # shared-mutable-default pitfall; the type hint already admits tuples.
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_listbox(items, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, num_items=num_items, **kwargs)
def add_loading_indicator(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =(), style: int =0, circle_count: int =8, speed: float =1.0, radius: float =3.0, thickness: float =1.0, color: Union[List[int], Tuple[int, ...]] =(51, 51, 55, 255), secondary_color: Union[List[int], Tuple[int, ...]] =(29, 151, 236, 103), **kwargs) -> Union[int, str]:
    """ Adds a rotating animated loading symbol.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        style (int, optional): 0 is rotating dots style, 1 is rotating bar style.
        circle_count (int, optional): Number of dots show if dots or size of circle if circle.
        speed (float, optional): Speed the anamation will rotate.
        radius (float, optional): Radius size of the loading indicator.
        thickness (float, optional): Thickness of the circles or line.
        color (Union[List[int], Tuple[int, ...]], optional): Color of the growing center circle.
        secondary_color (Union[List[int], Tuple[int, ...]], optional): Background of the dots in dot mode.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # `pos` defaults to an empty tuple (not a mutable list) to avoid Python's
    # shared-mutable-default pitfall; the type hint already admits tuples.
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_loading_indicator(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, payload_type=payload_type, drop_callback=drop_callback, show=show, pos=pos, style=style, circle_count=circle_count, speed=speed, radius=radius, thickness=thickness, color=color, secondary_color=secondary_color, **kwargs)
def add_menu(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drop_callback: Callable =None, show: bool =True, enabled: bool =True, filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, **kwargs) -> Union[int, str]:
    """ Adds a menu to an existing menu bar.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        filter_key (str, optional): Used by filter widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_menu(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, indent=indent, parent=parent, before=before, payload_type=payload_type, drop_callback=drop_callback, show=show, enabled=enabled, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, **kwargs)
def add_menu_bar(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, show: bool =True, delay_search: bool =False, **kwargs) -> Union[int, str]:
    """Adds a menu bar to a window.

    Args:
        label: Display label; overrides the internal name.
        user_data: Arbitrary object passed to callbacks.
        use_internal_label: Append '### uuid' to the label internally.
        tag: Unique id used to refer to the item; acts as the label when
            no label is given.
        indent: Horizontal offset (multiple of the indent style).
        parent: Container to add this item to (runtime adding).
        show: Attempt to render the item.
        delay_search: Defer child searches until app end (optimization).
        id: Deprecated alias for 'tag'.

    Returns:
        The tag of the created item.
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_menu_bar(
        label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, indent=indent,
        parent=parent, show=show, delay_search=delay_search, **kwargs)
def add_menu_item(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: bool =False, shortcut: str ='', check: bool =False, **kwargs) -> Union[int, str]:
    """Adds a menu item to an existing menu.

    Menu items act similar to selectables and hold a bool value; when
    placed in a menu the checkmark reflects that value.

    Args:
        label: Display label; overrides the internal name.
        user_data: Arbitrary object passed to callbacks.
        use_internal_label: Append '### uuid' to the label internally.
        tag: Unique id used to refer to the item; acts as the label when
            no label is given.
        indent: Horizontal offset (multiple of the indent style).
        parent: Container to add this item to (runtime adding).
        before: Insert this item before the given sibling.
        payload_type: Drag/drop payload type; must match the target's.
        callback: Callback run by the item.
        drop_callback: Drop handler for drag and drop.
        show: Attempt to render the item.
        enabled: Toggles functionality and applies the disabled theme.
        filter_key: Key used by filter widgets.
        tracked: Enable scroll tracking.
        track_offset: Tracking anchor (0.0 top, 0.5 center, 1.0 bottom).
        default_value: Initial value; also controls the checkmark when shown.
        shortcut: Text shown on the item, typically a shortcut key command.
        check: Display a checkmark when selected and placed in a menu.
        id: Deprecated alias for 'tag'.
        drag_callback: Deprecated; removed.

    Returns:
        The tag of the created item.
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    # 'drag_callback' no longer exists for menu items; warn and discard it.
    if 'drag_callback' in kwargs:
        warnings.warn('drag_callback keyword removed', DeprecationWarning, 2)
        kwargs.pop('drag_callback', None)
    return internal_dpg.add_menu_item(
        label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, indent=indent,
        parent=parent, before=before, payload_type=payload_type,
        callback=callback, drop_callback=drop_callback, show=show,
        enabled=enabled, filter_key=filter_key, tracked=tracked,
        track_offset=track_offset, default_value=default_value,
        shortcut=shortcut, check=check, **kwargs)
def add_mouse_click_handler(button : int =-1, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, callback: Callable =None, show: bool =True, parent: Union[int, str] =internal_dpg.mvReservedUUID_1, **kwargs) -> Union[int, str]:
    """Adds a mouse click handler.

    Args:
        button: Mouse button to listen for (-1 submits for all buttons).
        label: Display label; overrides the internal name.
        user_data: Arbitrary object passed to callbacks.
        use_internal_label: Append '### uuid' to the label internally.
        tag: Unique id used to refer to the item; acts as the label when
            no label is given.
        callback: Callback run when the event fires.
        show: Attempt to render the item.
        parent: Handler registry to add this item to (runtime adding).
        id: Deprecated alias for 'tag'.

    Returns:
        The tag of the created handler.
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_mouse_click_handler(
        button, label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, callback=callback,
        show=show, parent=parent, **kwargs)
def add_mouse_double_click_handler(button : int =-1, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, callback: Callable =None, show: bool =True, parent: Union[int, str] =internal_dpg.mvReservedUUID_1, **kwargs) -> Union[int, str]:
    """Adds a mouse double click handler.

    Args:
        button: Mouse button to listen for (-1 submits for all buttons).
        label: Display label; overrides the internal name.
        user_data: Arbitrary object passed to callbacks.
        use_internal_label: Append '### uuid' to the label internally.
        tag: Unique id used to refer to the item; acts as the label when
            no label is given.
        callback: Callback run when the event fires.
        show: Attempt to render the item.
        parent: Handler registry to add this item to (runtime adding).
        id: Deprecated alias for 'tag'.

    Returns:
        The tag of the created handler.
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_mouse_double_click_handler(
        button, label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, callback=callback,
        show=show, parent=parent, **kwargs)
def add_mouse_down_handler(button : int =-1, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, callback: Callable =None, show: bool =True, parent: Union[int, str] =internal_dpg.mvReservedUUID_1, **kwargs) -> Union[int, str]:
    """Adds a mouse down handler.

    Args:
        button: Mouse button to listen for (-1 submits for all buttons).
        label: Display label; overrides the internal name.
        user_data: Arbitrary object passed to callbacks.
        use_internal_label: Append '### uuid' to the label internally.
        tag: Unique id used to refer to the item; acts as the label when
            no label is given.
        callback: Callback run when the event fires.
        show: Attempt to render the item.
        parent: Handler registry to add this item to (runtime adding).
        id: Deprecated alias for 'tag'.

    Returns:
        The tag of the created handler.
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_mouse_down_handler(
        button, label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, callback=callback,
        show=show, parent=parent, **kwargs)
def add_mouse_drag_handler(button : int =-1, threshold : float =10.0, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, callback: Callable =None, show: bool =True, parent: Union[int, str] =internal_dpg.mvReservedUUID_1, **kwargs) -> Union[int, str]:
    """Adds a mouse drag handler.

    Args:
        button: Mouse button to listen for (-1 submits for all buttons).
        threshold: Distance the mouse must be dragged before the callback
            is run.
        label: Display label; overrides the internal name.
        user_data: Arbitrary object passed to callbacks.
        use_internal_label: Append '### uuid' to the label internally.
        tag: Unique id used to refer to the item; acts as the label when
            no label is given.
        callback: Callback run when the event fires.
        show: Attempt to render the item.
        parent: Handler registry to add this item to (runtime adding).
        id: Deprecated alias for 'tag'.

    Returns:
        The tag of the created handler.
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_mouse_drag_handler(
        button, threshold, label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, callback=callback,
        show=show, parent=parent, **kwargs)
def add_mouse_move_handler(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, callback: Callable =None, show: bool =True, parent: Union[int, str] =internal_dpg.mvReservedUUID_1, **kwargs) -> Union[int, str]:
    """Adds a mouse move handler.

    Args:
        label: Display label; overrides the internal name.
        user_data: Arbitrary object passed to callbacks.
        use_internal_label: Append '### uuid' to the label internally.
        tag: Unique id used to refer to the item; acts as the label when
            no label is given.
        callback: Callback run when the event fires.
        show: Attempt to render the item.
        parent: Handler registry to add this item to (runtime adding).
        id: Deprecated alias for 'tag'.

    Returns:
        The tag of the created handler.
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_mouse_move_handler(
        label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, callback=callback,
        show=show, parent=parent, **kwargs)
def add_mouse_release_handler(button : int =-1, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, callback: Callable =None, show: bool =True, parent: Union[int, str] =internal_dpg.mvReservedUUID_1, **kwargs) -> Union[int, str]:
    """Adds a mouse release handler.

    Args:
        button: Mouse button to listen for (-1 submits for all buttons).
        label: Display label; overrides the internal name.
        user_data: Arbitrary object passed to callbacks.
        use_internal_label: Append '### uuid' to the label internally.
        tag: Unique id used to refer to the item; acts as the label when
            no label is given.
        callback: Callback run when the event fires.
        show: Attempt to render the item.
        parent: Handler registry to add this item to (runtime adding).
        id: Deprecated alias for 'tag'.

    Returns:
        The tag of the created handler.
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_mouse_release_handler(
        button, label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, callback=callback,
        show=show, parent=parent, **kwargs)
def add_mouse_wheel_handler(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, callback: Callable =None, show: bool =True, parent: Union[int, str] =internal_dpg.mvReservedUUID_1, **kwargs) -> Union[int, str]:
    """Adds a mouse wheel handler.

    Args:
        label: Display label; overrides the internal name.
        user_data: Arbitrary object passed to callbacks.
        use_internal_label: Append '### uuid' to the label internally.
        tag: Unique id used to refer to the item; acts as the label when
            no label is given.
        callback: Callback run when the event fires.
        show: Attempt to render the item.
        parent: Handler registry to add this item to (runtime adding).
        id: Deprecated alias for 'tag'.

    Returns:
        The tag of the created handler.
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_mouse_wheel_handler(
        label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, callback=callback,
        show=show, parent=parent, **kwargs)
def add_node(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, draggable: bool =True, **kwargs) -> Union[int, str]:
    """Adds a node to a node editor.

    Args:
        label: Display label; overrides the internal name.
        user_data: Arbitrary object passed to callbacks.
        use_internal_label: Append '### uuid' to the label internally.
        tag: Unique id used to refer to the item; acts as the label when
            no label is given.
        parent: Container to add this item to (runtime adding).
        before: Insert this item before the given sibling.
        payload_type: Drag/drop payload type; must match the target's.
        drag_callback: Drag handler for drag and drop.
        drop_callback: Drop handler for drag and drop.
        show: Attempt to render the item.
        pos: Position relative to window coordinates; [0, 0] is top left.
            (NOTE: the mutable [] default is generated code; the wrapper
            only forwards it.)
        filter_key: Key used by filter widgets.
        delay_search: Defer child searches until app end (optimization).
        tracked: Enable scroll tracking.
        track_offset: Tracking anchor (0.0 top, 0.5 center, 1.0 bottom).
        draggable: Allow the node to be dragged.
        id: Deprecated alias for 'tag'.

    Returns:
        The tag of the created item.
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_node(
        label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, parent=parent,
        before=before, payload_type=payload_type,
        drag_callback=drag_callback, drop_callback=drop_callback, show=show,
        pos=pos, filter_key=filter_key, delay_search=delay_search,
        tracked=tracked, track_offset=track_offset, draggable=draggable,
        **kwargs)
def add_node_attribute(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, filter_key: str ='', tracked: bool =False, track_offset: float =0.5, attribute_type: int =0, shape: int =1, category: str ='general', **kwargs) -> Union[int, str]:
    """Adds a node attribute to a node.

    Args:
        label: Display label; overrides the internal name.
        user_data: Arbitrary object passed to callbacks.
        use_internal_label: Append '### uuid' to the label internally.
        tag: Unique id used to refer to the item; acts as the label when
            no label is given.
        indent: Horizontal offset (multiple of the indent style).
        parent: Container to add this item to (runtime adding).
        before: Insert this item before the given sibling.
        show: Attempt to render the item.
        filter_key: Key used by filter widgets.
        tracked: Enable scroll tracking.
        track_offset: Tracking anchor (0.0 top, 0.5 center, 1.0 bottom).
        attribute_type: mvNode_Attr_Input, mvNode_Attr_Output, or
            mvNode_Attr_Static.
        shape: Pin shape.
        category: Attribute category.
        id: Deprecated alias for 'tag'.

    Returns:
        The tag of the created item.
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_node_attribute(
        label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, indent=indent,
        parent=parent, before=before, show=show, filter_key=filter_key,
        tracked=tracked, track_offset=track_offset,
        attribute_type=attribute_type, shape=shape, category=category,
        **kwargs)
def add_node_editor(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, parent: Union[int, str] =0, before: Union[int, str] =0, callback: Callable =None, show: bool =True, filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, delink_callback: Callable =None, menubar: bool =False, **kwargs) -> Union[int, str]:
    """Adds a node editor.

    Args:
        label: Display label; overrides the internal name.
        user_data: Arbitrary object passed to callbacks.
        use_internal_label: Append '### uuid' to the label internally.
        tag: Unique id used to refer to the item; acts as the label when
            no label is given.
        width: Width of the item.
        height: Height of the item.
        parent: Container to add this item to (runtime adding).
        before: Insert this item before the given sibling.
        callback: Callback run by the item.
        show: Attempt to render the item.
        filter_key: Key used by filter widgets.
        delay_search: Defer child searches until app end (optimization).
        tracked: Enable scroll tracking.
        track_offset: Tracking anchor (0.0 top, 0.5 center, 1.0 bottom).
        delink_callback: Callback run when a link is detached.
        menubar: Shows or hides the menubar.
        id: Deprecated alias for 'tag'.

    Returns:
        The tag of the created item.
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_node_editor(
        label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, width=width,
        height=height, parent=parent, before=before, callback=callback,
        show=show, filter_key=filter_key, delay_search=delay_search,
        tracked=tracked, track_offset=track_offset,
        delink_callback=delink_callback, menubar=menubar, **kwargs)
def add_node_link(attr_1 : Union[int, str], attr_2 : Union[int, str], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, show: bool =True, **kwargs) -> Union[int, str]:
    """Adds a node link between 2 node attributes.

    Args:
        attr_1: First node attribute to connect.
        attr_2: Second node attribute to connect.
        label: Display label; overrides the internal name.
        user_data: Arbitrary object passed to callbacks.
        use_internal_label: Append '### uuid' to the label internally.
        tag: Unique id used to refer to the item; acts as the label when
            no label is given.
        parent: Container to add this item to (runtime adding).
        show: Attempt to render the item.
        id: Deprecated alias for 'tag'.

    Returns:
        The tag of the created item.
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_node_link(
        attr_1, attr_2, label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, parent=parent,
        show=show, **kwargs)
def add_pie_series(x : float, y : float, radius : float, values : Union[List[float], Tuple[float, ...]], labels : Union[List[str], Tuple[str, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, show: bool =True, format: str ='%0.2f', angle: float =90.0, normalize: bool =False, **kwargs) -> Union[int, str]:
    """Adds a pie series to a plot.

    Args:
        x: X coordinate of the pie center.
        y: Y coordinate of the pie center.
        radius: Pie radius.
        values: Slice values.
        labels: Slice labels.
        label: Display label; overrides the internal name.
        user_data: Arbitrary object passed to callbacks.
        use_internal_label: Append '### uuid' to the label internally.
        tag: Unique id used to refer to the item; acts as the label when
            no label is given.
        parent: Container to add this item to (runtime adding).
        before: Insert this item before the given sibling.
        source: Overrides 'id' as the value storage key.
        show: Attempt to render the item.
        format: printf-style format used for slice values.
        angle: Starting angle.
        normalize: Normalize values so they sum to one.
        id: Deprecated alias for 'tag'.

    Returns:
        The tag of the created item.
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_pie_series(
        x, y, radius, values, labels, label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, parent=parent,
        before=before, source=source, show=show, format=format, angle=angle,
        normalize=normalize, **kwargs)
def add_plot(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, no_title: bool =False, no_menus: bool =False, no_box_select: bool =False, no_mouse_pos: bool =False, no_highlight: bool =False, no_child: bool =False, query: bool =False, crosshairs: bool =False, anti_aliased: bool =False, equal_aspects: bool =False, pan_button: int =internal_dpg.mvMouseButton_Left, pan_mod: int =-1, fit_button: int =internal_dpg.mvMouseButton_Left, context_menu_button: int =internal_dpg.mvMouseButton_Right, box_select_button: int =internal_dpg.mvMouseButton_Right, box_select_mod: int =-1, box_select_cancel_button: int =internal_dpg.mvMouseButton_Left, query_button: int =internal_dpg.mvMouseButton_Middle, query_mod: int =-1, query_toggle_mod: int =internal_dpg.mvKey_Control, horizontal_mod: int =internal_dpg.mvKey_Alt, vertical_mod: int =internal_dpg.mvKey_Shift, **kwargs) -> Union[int, str]:
    """Adds a plot which holds series and can be drawn to with draw commands.

    Args:
        label: Display label; overrides the internal name.
        user_data: Arbitrary object passed to callbacks.
        use_internal_label: Append '### uuid' to the label internally.
        tag: Unique id used to refer to the item; acts as the label when
            no label is given.
        width: Width of the item.
        height: Height of the item.
        indent: Horizontal offset (multiple of the indent style).
        parent: Container to add this item to (runtime adding).
        before: Insert this item before the given sibling.
        payload_type: Drag/drop payload type; must match the target's.
        callback: Callback run by the item.
        drag_callback: Drag handler for drag and drop.
        drop_callback: Drop handler for drag and drop.
        show: Attempt to render the item.
        pos: Position relative to window coordinates; [0, 0] is top left.
            (NOTE: the mutable [] default is generated code; the wrapper
            only forwards it.)
        filter_key: Key used by filter widgets.
        delay_search: Defer child searches until app end (optimization).
        tracked: Enable scroll tracking.
        track_offset: Tracking anchor (0.0 top, 0.5 center, 1.0 bottom).
        no_title: Hide the plot title.
        no_menus: Disable plot menus.
        no_box_select: Disable box selection.
        no_mouse_pos: Hide the mouse position readout.
        no_highlight: Disable highlighting.
        no_child: Do not use a child window.
        query: Enable query selection.
        crosshairs: Show crosshairs.
        anti_aliased: Anti-alias plot lines.
        equal_aspects: Lock the x/y aspect ratio.
        pan_button: Enables panning when held.
        pan_mod: Optional modifier that must be held for panning.
        fit_button: Fits visible data when double clicked.
        context_menu_button: Opens the plot context menu (if enabled) when
            clicked.
        box_select_button: Begins box selection when pressed and confirms
            selection when released.
        box_select_mod: Optional modifier for box selection.
        box_select_cancel_button: Cancels an active box selection when
            pressed.
        query_button: Begins query selection when pressed and ends it when
            released.
        query_mod: Optional modifier that must be held for query selection.
        query_toggle_mod: When held, active box selections turn into queries.
        horizontal_mod: Expands the active box selection/query horizontally
            to the plot edge when held.
        vertical_mod: Expands the active box selection/query vertically to
            the plot edge when held.
        id: Deprecated alias for 'tag'.

    Returns:
        The tag of the created item.
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_plot(
        label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, width=width,
        height=height, indent=indent, parent=parent, before=before,
        payload_type=payload_type, callback=callback,
        drag_callback=drag_callback, drop_callback=drop_callback, show=show,
        pos=pos, filter_key=filter_key, delay_search=delay_search,
        tracked=tracked, track_offset=track_offset, no_title=no_title,
        no_menus=no_menus, no_box_select=no_box_select,
        no_mouse_pos=no_mouse_pos, no_highlight=no_highlight,
        no_child=no_child, query=query, crosshairs=crosshairs,
        anti_aliased=anti_aliased, equal_aspects=equal_aspects,
        pan_button=pan_button, pan_mod=pan_mod, fit_button=fit_button,
        context_menu_button=context_menu_button,
        box_select_button=box_select_button, box_select_mod=box_select_mod,
        box_select_cancel_button=box_select_cancel_button,
        query_button=query_button, query_mod=query_mod,
        query_toggle_mod=query_toggle_mod, horizontal_mod=horizontal_mod,
        vertical_mod=vertical_mod, **kwargs)
def add_plot_annotation(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, show: bool =True, default_value: Any =(0.0, 0.0), offset: Union[List[float], Tuple[float, ...]] =(0.0, 0.0), color: Union[List[int], Tuple[int, ...]] =(0, 0, 0, -255), clamped: bool =True, **kwargs) -> Union[int, str]:
    """Adds an annotation to a plot.

    Args:
        label: Display label; overrides the internal name.
        user_data: Arbitrary object passed to callbacks.
        use_internal_label: Append '### uuid' to the label internally.
        tag: Unique id used to refer to the item; acts as the label when
            no label is given.
        parent: Container to add this item to (runtime adding).
        before: Insert this item before the given sibling.
        source: Overrides 'id' as the value storage key.
        show: Attempt to render the item.
        default_value: Initial annotation position.
        offset: Offset from the annotation position.
        color: Annotation color.
        clamped: Keep the annotation inside the plot area.
        id: Deprecated alias for 'tag'.

    Returns:
        The tag of the created item.
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_plot_annotation(
        label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, parent=parent,
        before=before, source=source, show=show, default_value=default_value,
        offset=offset, color=color, clamped=clamped, **kwargs)
def add_plot_axis(axis : int, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drop_callback: Callable =None, show: bool =True, no_gridlines: bool =False, no_tick_marks: bool =False, no_tick_labels: bool =False, log_scale: bool =False, invert: bool =False, lock_min: bool =False, lock_max: bool =False, time: bool =False, **kwargs) -> Union[int, str]:
    """Adds an axis to a plot.

    Args:
        axis: Which axis (e.g. mvXAxis / mvYAxis).
        label: Display label; overrides the internal name.
        user_data: Arbitrary object passed to callbacks.
        use_internal_label: Append '### uuid' to the label internally.
        tag: Unique id used to refer to the item; acts as the label when
            no label is given.
        parent: Container to add this item to (runtime adding).
        payload_type: Drag/drop payload type; must match the target's.
        drop_callback: Drop handler for drag and drop.
        show: Attempt to render the item.
        no_gridlines: Hide gridlines.
        no_tick_marks: Hide tick marks.
        no_tick_labels: Hide tick labels.
        log_scale: Use a logarithmic scale.
        invert: Invert the axis.
        lock_min: Lock the axis minimum.
        lock_max: Lock the axis maximum.
        time: Treat axis values as time.
        id: Deprecated alias for 'tag'.

    Returns:
        The tag of the created item.
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_plot_axis(
        axis, label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, parent=parent,
        payload_type=payload_type, drop_callback=drop_callback, show=show,
        no_gridlines=no_gridlines, no_tick_marks=no_tick_marks,
        no_tick_labels=no_tick_labels, log_scale=log_scale, invert=invert,
        lock_min=lock_min, lock_max=lock_max, time=time, **kwargs)
def add_plot_legend(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drop_callback: Callable =None, show: bool =True, location: int =5, horizontal: bool =False, outside: bool =False, **kwargs) -> Union[int, str]:
	""" Adds a plot legend to a plot.
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
		drop_callback (Callable, optional): Registers a drop callback for drag and drop.
		show (bool, optional): Attempt to render widget.
		location (int, optional): location, mvPlot_Location_*
		horizontal (bool, optional): 
		outside (bool, optional): 
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Accept the deprecated 'id' keyword as an alias for 'tag'.
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_plot_legend(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, payload_type=payload_type, drop_callback=drop_callback, show=show, location=location, horizontal=horizontal, outside=outside, **kwargs)
def add_progress_bar(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, overlay: str ='', default_value: float =0.0, **kwargs) -> Union[int, str]:
	""" Adds a progress bar.
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		width (int, optional): Width of the item.
		height (int, optional): Height of the item.
		indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		source (Union[int, str], optional): Overrides 'id' as value storage key.
		payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
		drag_callback (Callable, optional): Registers a drag callback for drag and drop.
		drop_callback (Callable, optional): Registers a drop callback for drag and drop.
		show (bool, optional): Attempt to render widget.
		pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
		filter_key (str, optional): Used by filter widget.
		tracked (bool, optional): Scroll tracking
		track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
		overlay (str, optional): Overlayed text onto the bar that typically used to display the value of the progress.
		default_value (float, optional): Normalized value to fill the bar from 0.0 to 1.0.
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Accept the deprecated 'id' keyword as an alias for 'tag'.
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_progress_bar(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, drag_callback=drag_callback, drop_callback=drop_callback, show=show, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, overlay=overlay, default_value=default_value, **kwargs)
def add_radio_button(items : Union[List[str], Tuple[str, ...]] =(), *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: str ='', horizontal: bool =False, **kwargs) -> Union[int, str]:
	""" Adds a set of radio buttons. If items keyword is empty, nothing will be shown.
	Args:
		items (Union[List[str], Tuple[str, ...]], optional): A tuple of items to be shown as radio options. Can consist of any combination of types. All types will be shown as strings.
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		source (Union[int, str], optional): Overrides 'id' as value storage key.
		payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
		callback (Callable, optional): Registers a callback.
		drag_callback (Callable, optional): Registers a drag callback for drag and drop.
		drop_callback (Callable, optional): Registers a drop callback for drag and drop.
		show (bool, optional): Attempt to render widget.
		enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
		pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
		filter_key (str, optional): Used by filter widget.
		tracked (bool, optional): Scroll tracking
		track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
		default_value (str, optional): Default selected radio option. Set by using the string value of the item.
		horizontal (bool, optional): Displays the radio options horizontally.
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Accept the deprecated 'id' keyword as an alias for 'tag'.
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_radio_button(items, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, horizontal=horizontal, **kwargs)
def add_raw_texture(width : int, height : int, default_value : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, format: int =internal_dpg.mvFormat_Float_rgba, parent: Union[int, str] =internal_dpg.mvReservedUUID_2, **kwargs) -> Union[int, str]:
	""" Adds a raw texture.
	Args:
		width (int): 
		height (int): 
		default_value (Union[List[float], Tuple[float, ...]]): 
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		format (int, optional): Data format.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Accept the deprecated 'id' keyword as an alias for 'tag'.
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_raw_texture(width, height, default_value, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, format=format, parent=parent, **kwargs)
def add_scatter_series(x : Union[List[float], Tuple[float, ...]], y : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, show: bool =True, **kwargs) -> Union[int, str]:
	""" Adds a scatter series to a plot.
	Args:
		x (Any): 
		y (Any): 
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		source (Union[int, str], optional): Overrides 'id' as value storage key.
		show (bool, optional): Attempt to render widget.
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Accept the deprecated 'id' keyword as an alias for 'tag'.
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_scatter_series(x, y, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, source=source, show=show, **kwargs)
def add_selectable(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: bool =False, span_columns: bool =False, **kwargs) -> Union[int, str]:
	""" Adds a selectable. Similar to a button but can indicate its selected state.
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		width (int, optional): Width of the item.
		height (int, optional): Height of the item.
		indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		source (Union[int, str], optional): Overrides 'id' as value storage key.
		payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
		callback (Callable, optional): Registers a callback.
		drag_callback (Callable, optional): Registers a drag callback for drag and drop.
		drop_callback (Callable, optional): Registers a drop callback for drag and drop.
		show (bool, optional): Attempt to render widget.
		enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
		pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
		filter_key (str, optional): Used by filter widget.
		tracked (bool, optional): Scroll tracking
		track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
		default_value (bool, optional): 
		span_columns (bool, optional): Forces the selectable to span the width of all columns if placed in a table.
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Accept the deprecated 'id' keyword as an alias for 'tag'.
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_selectable(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, span_columns=span_columns, **kwargs)
def add_separator(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], **kwargs) -> Union[int, str]:
	""" Adds a horizontal line separator.
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		show (bool, optional): Attempt to render widget.
		pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Accept the deprecated 'id' keyword as an alias for 'tag'.
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_separator(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, indent=indent, parent=parent, before=before, show=show, pos=pos, **kwargs)
def add_series_value(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, source: Union[int, str] =0, default_value: Any =(), parent: Union[int, str] =internal_dpg.mvReservedUUID_3, **kwargs) -> Union[int, str]:
	""" Adds a plot series value.
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		source (Union[int, str], optional): Overrides 'id' as value storage key.
		default_value (Any, optional): 
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Accept the deprecated 'id' keyword as an alias for 'tag'.
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_series_value(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, source=source, default_value=default_value, parent=parent, **kwargs)
def add_shade_series(x : Union[List[float], Tuple[float, ...]], y1 : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, show: bool =True, y2: Any =[], **kwargs) -> Union[int, str]:
	""" Adds a shade series to a plot.
	Args:
		x (Any): 
		y1 (Any): 
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		source (Union[int, str], optional): Overrides 'id' as value storage key.
		show (bool, optional): Attempt to render widget.
		y2 (Any, optional): 
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Accept the deprecated 'id' keyword as an alias for 'tag'.
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_shade_series(x, y1, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, source=source, show=show, y2=y2, **kwargs)
def add_simple_plot(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: Union[List[float], Tuple[float, ...]] =(), overlay: str ='', histogram: bool =False, autosize: bool =True, min_scale: float =0.0, max_scale: float =0.0, **kwargs) -> Union[int, str]:
	""" Adds a simple plot for visualization of a 1 dimensional set of values.
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		width (int, optional): Width of the item.
		height (int, optional): Height of the item.
		indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		source (Union[int, str], optional): Overrides 'id' as value storage key.
		payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
		drag_callback (Callable, optional): Registers a drag callback for drag and drop.
		drop_callback (Callable, optional): Registers a drop callback for drag and drop.
		show (bool, optional): Attempt to render widget.
		filter_key (str, optional): Used by filter widget.
		tracked (bool, optional): Scroll tracking
		track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
		default_value (Union[List[float], Tuple[float, ...]], optional): 
		overlay (str, optional): overlays text (similar to a plot title)
		histogram (bool, optional): 
		autosize (bool, optional): 
		min_scale (float, optional): 
		max_scale (float, optional): 
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Accept the deprecated 'id' keyword as an alias for 'tag'.
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_simple_plot(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, drag_callback=drag_callback, drop_callback=drop_callback, show=show, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, overlay=overlay, histogram=histogram, autosize=autosize, min_scale=min_scale, max_scale=max_scale, **kwargs)
def add_slider_float(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: float =0.0, vertical: bool =False, no_input: bool =False, clamped: bool =False, min_value: float =0.0, max_value: float =100.0, format: str ='%.3f', **kwargs) -> Union[int, str]:
	""" Adds slider for a single float value. Directly entry can be done with double click or CTRL+Click. Min and Max alone are a soft limit for the slider. Use clamped keyword to also apply limits to the direct entry modes.
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		width (int, optional): Width of the item.
		height (int, optional): Height of the item.
		indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		source (Union[int, str], optional): Overrides 'id' as value storage key.
		payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
		callback (Callable, optional): Registers a callback.
		drag_callback (Callable, optional): Registers a drag callback for drag and drop.
		drop_callback (Callable, optional): Registers a drop callback for drag and drop.
		show (bool, optional): Attempt to render widget.
		enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
		pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
		filter_key (str, optional): Used by filter widget.
		tracked (bool, optional): Scroll tracking
		track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
		default_value (float, optional): 
		vertical (bool, optional): Sets orientation of the slidebar and slider to vertical.
		no_input (bool, optional): Disable direct entry methods double-click or ctrl+click or Enter key allowing to input text directly into the item.
		clamped (bool, optional): Applies the min and max limits to direct entry methods also such as double click and CTRL+Click.
		min_value (float, optional): Applies a limit only to sliding entry only.
		max_value (float, optional): Applies a limit only to sliding entry only.
		format (str, optional): Determines the format the float will be displayed as use python string formatting.
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Accept the deprecated 'id' keyword as an alias for 'tag'.
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_slider_float(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, vertical=vertical, no_input=no_input, clamped=clamped, min_value=min_value, max_value=max_value, format=format, **kwargs)
def add_slider_floatx(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: Union[List[float], Tuple[float, ...]] =(0.0, 0.0, 0.0, 0.0), size: int =4, no_input: bool =False, clamped: bool =False, min_value: float =0.0, max_value: float =100.0, format: str ='%.3f', **kwargs) -> Union[int, str]:
	""" Adds multi slider for up to 4 float values. Directly entry can be done with double click or CTRL+Click. Min and Max alone are a soft limit for the slider. Use clamped keyword to also apply limits to the direct entry modes.
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		width (int, optional): Width of the item.
		indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		source (Union[int, str], optional): Overrides 'id' as value storage key.
		payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
		callback (Callable, optional): Registers a callback.
		drag_callback (Callable, optional): Registers a drag callback for drag and drop.
		drop_callback (Callable, optional): Registers a drop callback for drag and drop.
		show (bool, optional): Attempt to render widget.
		enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
		pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
		filter_key (str, optional): Used by filter widget.
		tracked (bool, optional): Scroll tracking
		track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
		default_value (Union[List[float], Tuple[float, ...]], optional): 
		size (int, optional): Number of floats to be displayed.
		no_input (bool, optional): Disable direct entry methods double-click or ctrl+click or Enter key allowing to input text directly into the item.
		clamped (bool, optional): Applies the min and max limits to direct entry methods also such as double click and CTRL+Click.
		min_value (float, optional): Applies a limit only to sliding entry only.
		max_value (float, optional): Applies a limit only to sliding entry only.
		format (str, optional): Determines the format the int will be displayed as use python string formatting.
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Accept the deprecated 'id' keyword as an alias for 'tag'.
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_slider_floatx(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, size=size, no_input=no_input, clamped=clamped, min_value=min_value, max_value=max_value, format=format, **kwargs)
def add_slider_int(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: int =0, vertical: bool =False, no_input: bool =False, clamped: bool =False, min_value: int =0, max_value: int =100, format: str ='%d', **kwargs) -> Union[int, str]:
	""" Adds slider for a single int value. Directly entry can be done with double click or CTRL+Click. Min and Max alone are a soft limit for the slider. Use clamped keyword to also apply limits to the direct entry modes.
	Args:
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		width (int, optional): Width of the item.
		height (int, optional): Height of the item.
		indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		source (Union[int, str], optional): Overrides 'id' as value storage key.
		payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
		callback (Callable, optional): Registers a callback.
		drag_callback (Callable, optional): Registers a drag callback for drag and drop.
		drop_callback (Callable, optional): Registers a drop callback for drag and drop.
		show (bool, optional): Attempt to render widget.
		enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
		pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
		filter_key (str, optional): Used by filter widget.
		tracked (bool, optional): Scroll tracking
		track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
		default_value (int, optional): 
		vertical (bool, optional): Sets orientation of the slidebar and slider to vertical.
		no_input (bool, optional): Disable direct entry methods double-click or ctrl+click or Enter key allowing to input text directly into the item.
		clamped (bool, optional): Applies the min and max limits to direct entry methods also such as double click and CTRL+Click.
		min_value (int, optional): Applies a limit only to sliding entry only.
		max_value (int, optional): Applies a limit only to sliding entry only.
		format (str, optional): Determines the format the int will be displayed as use python string formatting.
		id (Union[int, str], optional): (deprecated) 
	Returns:
		Union[int, str]
	"""
	# Accept the deprecated 'id' keyword as an alias for 'tag'.
	if 'id' in kwargs:
		warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		tag = kwargs['id']
	return internal_dpg.add_slider_int(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, vertical=vertical, no_input=no_input, clamped=clamped, min_value=min_value, max_value=max_value, format=format, **kwargs)
def add_slider_intx(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, enabled: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: Union[List[int], Tuple[int, ...]] =(0, 0, 0, 0), size: int =4, no_input: bool =False, clamped: bool =False, min_value: int =0, max_value: int =100, format: str ='%d', **kwargs) -> Union[int, str]:
    """ Adds multi slider for up to 4 int values. Directly entry can be done with double click or CTRL+Click. Min and Max alone are a soft limit for the slider. Use clamped keyword to also apply limits to the direct entry modes.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        default_value (Union[List[int], Tuple[int, ...]], optional):
        size (int, optional): Number of ints to be displayed.
        no_input (bool, optional): Disable direct entry methods double-click or ctrl+click or Enter key allowing to input text directly into the item.
        clamped (bool, optional): Applies the min and max limits to direct entry methods also such as double click and CTRL+Click.
        min_value (int, optional): Applies a limit only to sliding entry only.
        max_value (int, optional): Applies a limit only to sliding entry only.
        format (str, optional): Determines the format the int will be displayed as use python string formatting.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: map the deprecated 'id' keyword onto 'tag';
    # kwargs is still forwarded unchanged to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_slider_intx(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, enabled=enabled, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, size=size, no_input=no_input, clamped=clamped, min_value=min_value, max_value=max_value, format=format, **kwargs)
def add_spacer(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], **kwargs) -> Union[int, str]:
    """ Adds a spacer item that can be used to help with layouts or can be used as a placeholder item.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: map the deprecated 'id' keyword onto 'tag';
    # kwargs is still forwarded unchanged to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_spacer(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, show=show, pos=pos, **kwargs)
def add_stage(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, **kwargs) -> Union[int, str]:
    """ Adds a stage.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: map the deprecated 'id' keyword onto 'tag';
    # kwargs is still forwarded unchanged to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_stage(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, **kwargs)
def add_stair_series(x : Union[List[float], Tuple[float, ...]], y : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, show: bool =True, **kwargs) -> Union[int, str]:
    """ Adds a stair series to a plot.
    Args:
        x (Any):
        y (Any):
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: map the deprecated 'id' keyword onto 'tag';
    # kwargs is still forwarded unchanged to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_stair_series(x, y, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, source=source, show=show, **kwargs)
def add_static_texture(width : int, height : int, default_value : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =internal_dpg.mvReservedUUID_2, **kwargs) -> Union[int, str]:
    """ Adds a static texture.
    Args:
        width (int):
        height (int):
        default_value (Union[List[float], Tuple[float, ...]]):
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: map the deprecated 'id' keyword onto 'tag';
    # kwargs is still forwarded unchanged to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_static_texture(width, height, default_value, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, **kwargs)
def add_stem_series(x : Union[List[float], Tuple[float, ...]], y : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, show: bool =True, **kwargs) -> Union[int, str]:
    """ Adds a stem series to a plot.
    Args:
        x (Any):
        y (Any):
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: map the deprecated 'id' keyword onto 'tag';
    # kwargs is still forwarded unchanged to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_stem_series(x, y, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, indent=indent, parent=parent, before=before, source=source, show=show, **kwargs)
def add_string_value(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, source: Union[int, str] =0, default_value: str ='', parent: Union[int, str] =internal_dpg.mvReservedUUID_3, **kwargs) -> Union[int, str]:
    """ Adds a string value.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        default_value (str, optional):
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: map the deprecated 'id' keyword onto 'tag';
    # kwargs is still forwarded unchanged to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_string_value(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, source=source, default_value=default_value, parent=parent, **kwargs)
def add_subplots(rows : int, columns : int, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, row_ratios: Union[List[float], Tuple[float, ...]] =[], column_ratios: Union[List[float], Tuple[float, ...]] =[], no_title: bool =False, no_menus: bool =False, no_resize: bool =False, no_align: bool =False, link_rows: bool =False, link_columns: bool =False, link_all_x: bool =False, link_all_y: bool =False, column_major: bool =False, **kwargs) -> Union[int, str]:
    """ Adds a collection of plots.
    Args:
        rows (int):
        columns (int):
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        row_ratios (Union[List[float], Tuple[float, ...]], optional):
        column_ratios (Union[List[float], Tuple[float, ...]], optional):
        no_title (bool, optional):
        no_menus (bool, optional): the user will not be able to open context menus with right-click
        no_resize (bool, optional): resize splitters between subplot cells will be not be provided
        no_align (bool, optional): subplot edges will not be aligned vertically or horizontally
        link_rows (bool, optional): link the y-axis limits of all plots in each row (does not apply auxiliary y-axes)
        link_columns (bool, optional): link the x-axis limits of all plots in each column
        link_all_x (bool, optional): link the x-axis limits in every plot in the subplot
        link_all_y (bool, optional): link the y-axis limits in every plot in the subplot (does not apply to auxiliary y-axes)
        column_major (bool, optional): subplots are added in column major order instead of the default row major order
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: map the deprecated 'id' keyword onto 'tag';
    # kwargs is still forwarded unchanged to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_subplots(rows, columns, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, callback=callback, show=show, pos=pos, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, row_ratios=row_ratios, column_ratios=column_ratios, no_title=no_title, no_menus=no_menus, no_resize=no_resize, no_align=no_align, link_rows=link_rows, link_columns=link_columns, link_all_x=link_all_x, link_all_y=link_all_y, column_major=column_major, **kwargs)
def add_tab(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drop_callback: Callable =None, show: bool =True, filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, closable: bool =False, no_tooltip: bool =False, order_mode: int =0, **kwargs) -> Union[int, str]:
    """ Adds a tab to a tab bar.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        filter_key (str, optional): Used by filter widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        closable (bool, optional): Creates a button on the tab that can hide the tab.
        no_tooltip (bool, optional): Disable tooltip for the given tab.
        order_mode (int, optional): set using a constant: mvTabOrder_Reorderable: allows reordering, mvTabOrder_Fixed: fixed ordering, mvTabOrder_Leading: adds tab to front, mvTabOrder_Trailing: adds tab to back
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: map the deprecated 'id' keyword onto 'tag';
    # kwargs is still forwarded unchanged to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_tab(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, indent=indent, parent=parent, before=before, payload_type=payload_type, drop_callback=drop_callback, show=show, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, closable=closable, no_tooltip=no_tooltip, order_mode=order_mode, **kwargs)
def add_tab_bar(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, reorderable: bool =False, **kwargs) -> Union[int, str]:
    """ Adds a tab bar.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        reorderable (bool, optional): Allows for the user to change the order of the tabs.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: map the deprecated 'id' keyword onto 'tag';
    # kwargs is still forwarded unchanged to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_tab_bar(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, indent=indent, parent=parent, before=before, callback=callback, show=show, pos=pos, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, reorderable=reorderable, **kwargs)
def add_tab_button(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, filter_key: str ='', tracked: bool =False, track_offset: float =0.5, no_reorder: bool =False, leading: bool =False, trailing: bool =False, no_tooltip: bool =False, **kwargs) -> Union[int, str]:
    """ Adds a tab button to a tab bar.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        no_reorder (bool, optional): Disable reordering this tab or having another tab cross over this tab. Fixes the position of this tab in relation to the order of neighboring tabs at start.
        leading (bool, optional): Enforce the tab position to the left of the tab bar (after the tab list popup button).
        trailing (bool, optional): Enforce the tab position to the right of the tab bar (before the scrolling buttons).
        no_tooltip (bool, optional): Disable tooltip for the given tab.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: map the deprecated 'id' keyword onto 'tag';
    # kwargs is still forwarded unchanged to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_tab_button(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, indent=indent, parent=parent, before=before, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, filter_key=filter_key, tracked=tracked, track_offset=track_offset, no_reorder=no_reorder, leading=leading, trailing=trailing, no_tooltip=no_tooltip, **kwargs)
def add_table(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =[], filter_key: str ='', delay_search: bool =False, header_row: bool =True, clipper: bool =False, inner_width: int =0, policy: int =0, freeze_rows: int =0, freeze_columns: int =0, sort_multi: bool =False, sort_tristate: bool =False, resizable: bool =False, reorderable: bool =False, hideable: bool =False, sortable: bool =False, context_menu_in_body: bool =False, row_background: bool =False, borders_innerH: bool =False, borders_outerH: bool =False, borders_innerV: bool =False, borders_outerV: bool =False, no_host_extendX: bool =False, no_host_extendY: bool =False, no_keep_columns_visible: bool =False, precise_widths: bool =False, no_clip: bool =False, pad_outerX: bool =False, no_pad_outerX: bool =False, no_pad_innerX: bool =False, scrollX: bool =False, scrollY: bool =False, no_saved_settings: bool =False, **kwargs) -> Union[int, str]:
    """ Adds a table.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        callback (Callable, optional): Registers a callback.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left.
        filter_key (str, optional): Used by filter widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        header_row (bool, optional): show headers at the top of the columns
        clipper (bool, optional): Use clipper (rows must be same height).
        inner_width (int, optional):
        policy (int, optional):
        freeze_rows (int, optional):
        freeze_columns (int, optional):
        sort_multi (bool, optional): Hold shift when clicking headers to sort on multiple column.
        sort_tristate (bool, optional): Allow no sorting, disable default sorting.
        resizable (bool, optional): Enable resizing columns
        reorderable (bool, optional): Enable reordering columns in header row (need calling TableSetupColumn() + TableHeadersRow() to display headers)
        hideable (bool, optional): Enable hiding/disabling columns in context menu.
        sortable (bool, optional): Enable sorting. Call TableGetSortSpecs() to obtain sort specs. Also see ImGuiTableFlags_SortMulti and ImGuiTableFlags_SortTristate.
        context_menu_in_body (bool, optional): Right-click on columns body/contents will display table context menu. By default it is available in TableHeadersRow().
        row_background (bool, optional): Set each RowBg color with ImGuiCol_TableRowBg or ImGuiCol_TableRowBgAlt (equivalent of calling TableSetBgColor with ImGuiTableBgFlags_RowBg0 on each row manually)
        borders_innerH (bool, optional): Draw horizontal borders between rows.
        borders_outerH (bool, optional): Draw horizontal borders at the top and bottom.
        borders_innerV (bool, optional): Draw vertical borders between columns.
        borders_outerV (bool, optional): Draw vertical borders on the left and right sides.
        no_host_extendX (bool, optional): Make outer width auto-fit to columns, overriding outer_size.x value. Only available when ScrollX/ScrollY are disabled and Stretch columns are not used.
        no_host_extendY (bool, optional): Make outer height stop exactly at outer_size.y (prevent auto-extending table past the limit). Only available when ScrollX/ScrollY are disabled. Data below the limit will be clipped and not visible.
        no_keep_columns_visible (bool, optional): Disable keeping column always minimally visible when ScrollX is off and table gets too small. Not recommended if columns are resizable.
        precise_widths (bool, optional): Disable distributing remainder width to stretched columns (width allocation on a 100-wide table with 3 columns: Without this flag: 33,33,34. With this flag: 33,33,33). With larger number of columns, resizing will appear to be less smooth.
        no_clip (bool, optional): Disable clipping rectangle for every individual columns.
        pad_outerX (bool, optional): Default if BordersOuterV is on. Enable outer-most padding. Generally desirable if you have headers.
        no_pad_outerX (bool, optional): Default if BordersOuterV is off. Disable outer-most padding.
        no_pad_innerX (bool, optional): Disable inner padding between columns (double inner padding if BordersOuterV is on, single inner padding if BordersOuterV is off).
        scrollX (bool, optional): Enable horizontal scrolling. Require 'outer_size' parameter of BeginTable() to specify the container size. Changes default sizing policy. Because this create a child window, ScrollY is currently generally recommended when using ScrollX.
        scrollY (bool, optional): Enable vertical scrolling.
        no_saved_settings (bool, optional): Never load/save settings in .ini file.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: map the deprecated 'id' keyword onto 'tag';
    # kwargs is still forwarded unchanged to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_table(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, parent=parent, before=before, source=source, callback=callback, show=show, pos=pos, filter_key=filter_key, delay_search=delay_search, header_row=header_row, clipper=clipper, inner_width=inner_width, policy=policy, freeze_rows=freeze_rows, freeze_columns=freeze_columns, sort_multi=sort_multi, sort_tristate=sort_tristate, resizable=resizable, reorderable=reorderable, hideable=hideable, sortable=sortable, context_menu_in_body=context_menu_in_body, row_background=row_background, borders_innerH=borders_innerH, borders_outerH=borders_outerH, borders_innerV=borders_innerV, borders_outerV=borders_outerV, no_host_extendX=no_host_extendX, no_host_extendY=no_host_extendY, no_keep_columns_visible=no_keep_columns_visible, precise_widths=precise_widths, no_clip=no_clip, pad_outerX=pad_outerX, no_pad_outerX=no_pad_outerX, no_pad_innerX=no_pad_innerX, scrollX=scrollX, scrollY=scrollY, no_saved_settings=no_saved_settings, **kwargs)
def add_table_cell(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, height: int =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, filter_key: str ='', **kwargs) -> Union[int, str]:
    """ Adds a table.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        height (int, optional): Height of the item.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        filter_key (str, optional): Used by filter widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Back-compat shim: map the deprecated 'id' keyword onto 'tag';
    # kwargs is still forwarded unchanged to the internal call.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_table_cell(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, height=height, parent=parent, before=before, show=show, filter_key=filter_key, **kwargs)
def add_table_column(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, enabled: bool =True, init_width_or_weight: float =0.0, default_hide: bool =False, default_sort: bool =False, width_stretch: bool =False, width_fixed: bool =False, no_resize: bool =False, no_reorder: bool =False, no_hide: bool =False, no_clip: bool =False, no_sort: bool =False, no_sort_ascending: bool =False, no_sort_descending: bool =False, no_header_width: bool =False, prefer_sort_ascending: bool =True, prefer_sort_descending: bool =False, indent_enable: bool =False, indent_disable: bool =False, **kwargs) -> Union[int, str]:
    """ Adds a table column.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        enabled (bool, optional): Turns off functionality of widget and applies the disabled theme.
        init_width_or_weight (float, optional):
        default_hide (bool, optional): Default as a hidden/disabled column.
        default_sort (bool, optional): Default as a sorting column.
        width_stretch (bool, optional): Column will stretch. Preferable with horizontal scrolling disabled (default if table sizing policy is _SizingStretchSame or _SizingStretchProp).
        width_fixed (bool, optional): Column will not stretch. Preferable with horizontal scrolling enabled (default if table sizing policy is _SizingFixedFit and table is resizable).
        no_resize (bool, optional): Disable manual resizing.
        no_reorder (bool, optional): Disable manual reordering this column, this will also prevent other columns from crossing over this column.
        no_hide (bool, optional): Disable ability to hide/disable this column.
        no_clip (bool, optional): Disable clipping for this column (all NoClip columns will render in a same draw command).
        no_sort (bool, optional): Disable ability to sort on this field (even if ImGuiTableFlags_Sortable is set on the table).
        no_sort_ascending (bool, optional): Disable ability to sort in the ascending direction.
        no_sort_descending (bool, optional): Disable ability to sort in the descending direction.
        no_header_width (bool, optional): Disable header text width contribution to automatic column width.
        prefer_sort_ascending (bool, optional): Make the initial sort direction Ascending when first sorting on this column (default).
        prefer_sort_descending (bool, optional): Make the initial sort direction Descending when first sorting on this column.
        indent_enable (bool, optional): Use current Indent value when entering cell (default for column 0).
        indent_disable (bool, optional): Ignore current Indent value when entering cell (default for columns > 0). Indentation changes _within_ the cell will still be honored.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Legacy support: 'id' was renamed to 'tag'; the key stays in kwargs.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_table_column(
        label=label, user_data=user_data, use_internal_label=use_internal_label,
        tag=tag, width=width, parent=parent, before=before, show=show,
        enabled=enabled, init_width_or_weight=init_width_or_weight,
        default_hide=default_hide, default_sort=default_sort,
        width_stretch=width_stretch, width_fixed=width_fixed,
        no_resize=no_resize, no_reorder=no_reorder, no_hide=no_hide,
        no_clip=no_clip, no_sort=no_sort, no_sort_ascending=no_sort_ascending,
        no_sort_descending=no_sort_descending, no_header_width=no_header_width,
        prefer_sort_ascending=prefer_sort_ascending,
        prefer_sort_descending=prefer_sort_descending,
        indent_enable=indent_enable, indent_disable=indent_disable, **kwargs)
def add_table_row(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, height: int =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, filter_key: str ='', **kwargs) -> Union[int, str]:
    """ Adds a table row.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        height (int, optional): Height of the item.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        filter_key (str, optional): Used by filter widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Deprecated 'id' keyword takes precedence over 'tag' when supplied.
    try:
        tag = kwargs['id']
    except KeyError:
        pass
    else:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
    return internal_dpg.add_table_row(
        label=label, user_data=user_data, use_internal_label=use_internal_label,
        tag=tag, height=height, parent=parent, before=before, show=show,
        filter_key=filter_key, **kwargs)
def add_template_registry(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, **kwargs) -> Union[int, str]:
    """ Adds a template registry.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_template_registry(
        label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, **kwargs)
def add_text(default_value : str ='', *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =None, filter_key: str ='', tracked: bool =False, track_offset: float =0.5, wrap: int =-1, bullet: bool =False, color: Union[List[int], Tuple[int, ...]] =(-255, 0, 0, 255), show_label: bool =False, **kwargs) -> Union[int, str]:
    """ Adds text. Text can have an optional label that will display to the right of the text.
    Args:
        default_value (str, optional):
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left. Defaults to [].
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        wrap (int, optional): Number of pixels from the start of the item until wrapping starts.
        bullet (bool, optional): Places a bullet to the left of the text.
        color (Union[List[int], Tuple[int, ...]], optional): Color of the text (rgba).
        show_label (bool, optional): Displays the label to the right of the text.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Avoid the shared-mutable-default pitfall: a fresh list is created per
    # call instead of one [] object being shared by every invocation.
    if pos is None:
        pos = []
    if 'id' in kwargs.keys():
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag=kwargs['id']
    return internal_dpg.add_text(default_value, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, indent=indent, parent=parent, before=before, source=source, payload_type=payload_type, drag_callback=drag_callback, drop_callback=drop_callback, show=show, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, wrap=wrap, bullet=bullet, color=color, show_label=show_label, **kwargs)
def add_text_point(x : float, y : float, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, show: bool =True, x_offset: int =..., y_offset: int =..., vertical: bool =False, **kwargs) -> Union[int, str]:
    """ Adds a label series to a plot.
    Args:
        x (float):
        y (float):
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        show (bool, optional): Attempt to render widget.
        x_offset (int, optional):
        y_offset (int, optional):
        vertical (bool, optional):
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # NOTE: Ellipsis defaults for x_offset/y_offset are the generator's
    # "unset" sentinel and are forwarded to the C layer unchanged.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_text_point(
        x, y, label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, parent=parent,
        before=before, source=source, show=show, x_offset=x_offset,
        y_offset=y_offset, vertical=vertical, **kwargs)
def add_texture_registry(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, show: bool =False, **kwargs) -> Union[int, str]:
    """ Adds a texture registry.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Translate the deprecated 'id' keyword into 'tag'.
    try:
        tag = kwargs['id']
    except KeyError:
        pass
    else:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
    return internal_dpg.add_texture_registry(
        label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, show=show, **kwargs)
def add_theme(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, **kwargs) -> Union[int, str]:
    """ Adds a theme.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        id (Union[int, str], optional): (deprecated)
        default_theme (bool, optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Deprecated 'id' keyword becomes 'tag'; deprecated 'default_theme' is
    # stripped entirely because the C layer no longer accepts it.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    if 'default_theme' in kwargs:
        warnings.warn('default_theme keyword removed', DeprecationWarning, 2)
        del kwargs['default_theme']
    return internal_dpg.add_theme(
        label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, **kwargs)
def add_theme_color(target : int =0, value : Union[List[int], Tuple[int, ...]] =(0, 0, 0, 255), *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, category: int =0, **kwargs) -> Union[int, str]:
    """ Adds a theme color.
    Args:
        target (int, optional):
        value (Union[List[int], Tuple[int, ...]], optional):
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        category (int, optional): Options include mvThemeCat_Core, mvThemeCat_Plots, mvThemeCat_Nodes.
    Returns:
        Union[int, str]
    """
    # Backwards compatibility for the renamed 'id' keyword.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_theme_color(
        target, value, label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, parent=parent,
        category=category, **kwargs)
def add_theme_component(item_type : int =0, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, enabled_state: bool =True, **kwargs) -> Union[int, str]:
    """ Adds a theme component.
    Args:
        item_type (int, optional):
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        enabled_state (bool, optional):
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Honor the deprecated 'id' keyword by mapping it onto 'tag'.
    try:
        tag = kwargs['id']
    except KeyError:
        pass
    else:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
    return internal_dpg.add_theme_component(
        item_type, label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, parent=parent,
        before=before, enabled_state=enabled_state, **kwargs)
def add_theme_style(target : int =0, x : float =1.0, y : float =-1.0, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, category: int =0, **kwargs) -> Union[int, str]:
    """ Adds a theme style.
    Args:
        target (int, optional):
        x (float, optional):
        y (float, optional):
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        category (int, optional): Options include mvThemeCat_Core, mvThemeCat_Plots, mvThemeCat_Nodes.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Backwards compatibility for the renamed 'id' keyword.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_theme_style(
        target, x, y, label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, parent=parent,
        category=category, **kwargs)
def add_time_picker(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', callback: Callable =None, drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =None, filter_key: str ='', tracked: bool =False, track_offset: float =0.5, default_value: dict =None, hour24: bool =False, **kwargs) -> Union[int, str]:
    """ Adds a time picker.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        callback (Callable, optional): Registers a callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left. Defaults to [].
        filter_key (str, optional): Used by filter widget.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        default_value (dict, optional): Defaults to {'hour': 14, 'min': 32, 'sec': 23}.
        hour24 (bool, optional): Show 24 hour clock instead of 12 hour.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Fix the shared-mutable-default pitfall: the previous signature reused
    # one list/dict object across every call, so mutation by one caller (or
    # the C layer) would leak into all later calls. Fresh objects per call.
    if pos is None:
        pos = []
    if default_value is None:
        default_value = {'hour': 14, 'min': 32, 'sec': 23}
    if 'id' in kwargs.keys():
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag=kwargs['id']
    return internal_dpg.add_time_picker(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, indent=indent, parent=parent, before=before, payload_type=payload_type, callback=callback, drag_callback=drag_callback, drop_callback=drop_callback, show=show, pos=pos, filter_key=filter_key, tracked=tracked, track_offset=track_offset, default_value=default_value, hour24=hour24, **kwargs)
def add_tooltip(parent : Union[int, str], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, show: bool =True, **kwargs) -> Union[int, str]:
    """ Adds a tooltip window.
    Args:
        parent (Union[int, str]):
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    try:
        tag = kwargs['id']
    except KeyError:
        pass
    else:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
    return internal_dpg.add_tooltip(
        parent, label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, show=show, **kwargs)
def add_tree_node(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, before: Union[int, str] =0, payload_type: str ='$$DPG_PAYLOAD', drag_callback: Callable =None, drop_callback: Callable =None, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =None, filter_key: str ='', delay_search: bool =False, tracked: bool =False, track_offset: float =0.5, default_open: bool =False, open_on_double_click: bool =False, open_on_arrow: bool =False, leaf: bool =False, bullet: bool =False, selectable: bool =False, **kwargs) -> Union[int, str]:
    """ Adds a tree node to add items to.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        payload_type (str, optional): Sender string type must be the same as the target for the target to run the payload_callback.
        drag_callback (Callable, optional): Registers a drag callback for drag and drop.
        drop_callback (Callable, optional): Registers a drop callback for drag and drop.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left. Defaults to [].
        filter_key (str, optional): Used by filter widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        tracked (bool, optional): Scroll tracking
        track_offset (float, optional): 0.0f:top, 0.5f:center, 1.0f:bottom
        default_open (bool, optional): Sets the tree node open by default.
        open_on_double_click (bool, optional): Need double-click to open node.
        open_on_arrow (bool, optional): Only open when clicking on the arrow part.
        leaf (bool, optional): No collapsing, no arrow (use as a convenience for leaf nodes).
        bullet (bool, optional): Display a bullet instead of arrow.
        selectable (bool, optional): Makes the tree selectable.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Avoid the shared-mutable-default pitfall: a fresh list is created per
    # call instead of one [] object being shared by every invocation.
    if pos is None:
        pos = []
    if 'id' in kwargs.keys():
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag=kwargs['id']
    return internal_dpg.add_tree_node(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, indent=indent, parent=parent, before=before, payload_type=payload_type, drag_callback=drag_callback, drop_callback=drop_callback, show=show, pos=pos, filter_key=filter_key, delay_search=delay_search, tracked=tracked, track_offset=track_offset, default_open=default_open, open_on_double_click=open_on_double_click, open_on_arrow=open_on_arrow, leaf=leaf, bullet=bullet, selectable=selectable, **kwargs)
def add_value_registry(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, **kwargs) -> Union[int, str]:
    """ Adds a value registry.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Legacy support: 'id' was renamed to 'tag'.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_value_registry(
        label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, **kwargs)
def add_viewport_drawlist(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, show: bool =True, filter_key: str ='', delay_search: bool =False, front: bool =True, **kwargs) -> Union[int, str]:
    """ A container that is used to present draw items or layers directly to the viewport. By default this will draw to the back of the viewport. Layers and draw items should be added to this widget as children.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        show (bool, optional): Attempt to render widget.
        filter_key (str, optional): Used by filter widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        front (bool, optional): Draws to the front of the view port instead of the back.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Map the deprecated 'id' keyword onto 'tag'.
    try:
        tag = kwargs['id']
    except KeyError:
        pass
    else:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
    return internal_dpg.add_viewport_drawlist(
        label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, show=show,
        filter_key=filter_key, delay_search=delay_search, front=front,
        **kwargs)
def add_viewport_menu_bar(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, indent: int =-1, parent: Union[int, str] =0, show: bool =True, delay_search: bool =False, **kwargs) -> Union[int, str]:
    """ Adds a menubar to the viewport.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        show (bool, optional): Attempt to render widget.
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Backwards compatibility for the renamed 'id' keyword.
    if 'id' in kwargs:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag = kwargs['id']
    return internal_dpg.add_viewport_menu_bar(
        label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, indent=indent,
        parent=parent, show=show, delay_search=delay_search, **kwargs)
def add_vline_series(x : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, source: Union[int, str] =0, show: bool =True, **kwargs) -> Union[int, str]:
    """ Adds an infinite vertical line series to a plot.
    Args:
        x (Any):
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        source (Union[int, str], optional): Overrides 'id' as value storage key.
        show (bool, optional): Attempt to render widget.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Accept the deprecated 'id' keyword as an alias for 'tag'.
    try:
        tag = kwargs['id']
    except KeyError:
        pass
    else:
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
    return internal_dpg.add_vline_series(
        x, label=label, user_data=user_data,
        use_internal_label=use_internal_label, tag=tag, parent=parent,
        before=before, source=source, show=show, **kwargs)
def add_window(*, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, width: int =0, height: int =0, indent: int =-1, show: bool =True, pos: Union[List[int], Tuple[int, ...]] =None, delay_search: bool =False, min_size: Union[List[int], Tuple[int, ...]] =None, max_size: Union[List[int], Tuple[int, ...]] =None, menubar: bool =False, collapsed: bool =False, autosize: bool =False, no_resize: bool =False, no_title_bar: bool =False, no_move: bool =False, no_scrollbar: bool =False, no_collapse: bool =False, horizontal_scrollbar: bool =False, no_focus_on_appearing: bool =False, no_bring_to_front_on_focus: bool =False, no_close: bool =False, no_background: bool =False, modal: bool =False, popup: bool =False, no_saved_settings: bool =False, no_open_over_existing_popup: bool =True, on_close: Callable =None, **kwargs) -> Union[int, str]:
    """ Creates a new window for following items to be added to.
    Args:
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
        width (int, optional): Width of the item.
        height (int, optional): Height of the item.
        indent (int, optional): Offsets the widget to the right the specified number multiplied by the indent style.
        show (bool, optional): Attempt to render widget.
        pos (Union[List[int], Tuple[int, ...]], optional): Places the item relative to window coordinates, [0,0] is top left. Defaults to [].
        delay_search (bool, optional): Delays searching container for specified items until the end of the app. Possible optimization when a container has many children that are not accessed often.
        min_size (Union[List[int], Tuple[int, ...]], optional): Minimum window size. Defaults to [100, 100].
        max_size (Union[List[int], Tuple[int, ...]], optional): Maximum window size. Defaults to [30000, 30000].
        menubar (bool, optional): Shows or hides the menubar.
        collapsed (bool, optional): Collapse the window.
        autosize (bool, optional): Autosized the window to fit it's items.
        no_resize (bool, optional): Allows for the window size to be changed or fixed.
        no_title_bar (bool, optional): Title name for the title bar of the window.
        no_move (bool, optional): Allows for the window's position to be changed or fixed.
        no_scrollbar (bool, optional): Disable scrollbars. (window can still scroll with mouse or programmatically)
        no_collapse (bool, optional): Disable user collapsing window by double-clicking on it.
        horizontal_scrollbar (bool, optional): Allow horizontal scrollbar to appear. (off by default)
        no_focus_on_appearing (bool, optional): Disable taking focus when transitioning from hidden to visible state.
        no_bring_to_front_on_focus (bool, optional): Disable bringing window to front when taking focus. (e.g. clicking on it or programmatically giving it focus)
        no_close (bool, optional): Disable user closing the window by removing the close button.
        no_background (bool, optional): Sets Background and border alpha to transparent.
        modal (bool, optional): Fills area behind window according to the theme and disables user ability to interact with anything except the window.
        popup (bool, optional): Fills area behind window according to the theme, removes title bar, collapse and close. Window can be closed by selecting area in the background behind the window.
        no_saved_settings (bool, optional): Never load/save settings in .ini file.
        no_open_over_existing_popup (bool, optional): Don't open if there's already a popup
        on_close (Callable, optional): Callback ran when window is closed.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    # Fix the shared-mutable-default pitfall: pos/min_size/max_size used one
    # shared list object each across every call; mutation by any caller (or
    # the C layer) would leak into later calls. Build fresh lists per call.
    if pos is None:
        pos = []
    if min_size is None:
        min_size = [100, 100]
    if max_size is None:
        max_size = [30000, 30000]
    if 'id' in kwargs.keys():
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        tag=kwargs['id']
    return internal_dpg.add_window(label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, width=width, height=height, indent=indent, show=show, pos=pos, delay_search=delay_search, min_size=min_size, max_size=max_size, menubar=menubar, collapsed=collapsed, autosize=autosize, no_resize=no_resize, no_title_bar=no_title_bar, no_move=no_move, no_scrollbar=no_scrollbar, no_collapse=no_collapse, horizontal_scrollbar=horizontal_scrollbar, no_focus_on_appearing=no_focus_on_appearing, no_bring_to_front_on_focus=no_bring_to_front_on_focus, no_close=no_close, no_background=no_background, modal=modal, popup=popup, no_saved_settings=no_saved_settings, no_open_over_existing_popup=no_open_over_existing_popup, on_close=on_close, **kwargs)
def apply_transform(item : Union[int, str], transform : Any, **kwargs) -> None:
    """ New in 1.1. Applies a transformation matrix to a layer.
    Args:
        item (Union[int, str]): Drawing node that the transform will be applied to.
        transform (Any): Transformation matrix (e.g. produced by one of the create_*_matrix helpers).
    Returns:
        None
    """
    # Thin delegate: all real work happens inside the C extension.
    result = internal_dpg.apply_transform(item, transform, **kwargs)
    return result
def bind_colormap(item : Union[int, str], source : Union[int, str], **kwargs) -> None:
    """ Sets the color map for widgets that accept it.
    Args:
        item (Union[int, str]): Widget the color map will be applied to.
        source (Union[int, str]): Tag of the colormap. This should come from a colormap added to a colormap registry; built-in color maps are accessible through their corresponding constants (mvPlotColormap_Twilight, mvPlotColormap_***).
    Returns:
        None
    """
    # Forward straight to the C extension.
    result = internal_dpg.bind_colormap(item, source, **kwargs)
    return result
def bind_font(font : Union[int, str], **kwargs) -> Union[int, str]:
    """ Binds a global font.
    Args:
        font (Union[int, str]): Tag of the font to use globally.
    Returns:
        Union[int, str]
    """
    # Forward straight to the C extension.
    result = internal_dpg.bind_font(font, **kwargs)
    return result
def bind_item_font(item : Union[int, str], font : Union[int, str], **kwargs) -> None:
    """ Sets an item's font.
    Args:
        item (Union[int, str]): Item the font is bound to.
        font (Union[int, str]): Tag of the font.
    Returns:
        None
    """
    # Forward straight to the C extension.
    result = internal_dpg.bind_item_font(item, font, **kwargs)
    return result
def bind_item_handler_registry(item : Union[int, str], handler_registry : Union[int, str], **kwargs) -> None:
    """ Binds an item handler registry to an item.
    Args:
        item (Union[int, str]): Item the registry is bound to.
        handler_registry (Union[int, str]): Tag of the item handler registry.
    Returns:
        None
    """
    # Forward straight to the C extension.
    result = internal_dpg.bind_item_handler_registry(item, handler_registry, **kwargs)
    return result
def bind_item_theme(item : Union[int, str], theme : Union[int, str], **kwargs) -> None:
    """ Binds a theme to an item.
    Args:
        item (Union[int, str]): Item the theme is bound to.
        theme (Union[int, str]): Tag of the theme.
    Returns:
        None
    """
    # Forward straight to the C extension.
    result = internal_dpg.bind_item_theme(item, theme, **kwargs)
    return result
def bind_template_registry(template_registry : Union[int, str], **kwargs) -> None:
    """ Binds a global template registry.
    Args:
        template_registry (Union[int, str]): Tag of the template registry.
    Returns:
        None
    """
    # Forward straight to the C extension.
    result = internal_dpg.bind_template_registry(template_registry, **kwargs)
    return result
def bind_theme(theme : Union[int, str], **kwargs) -> None:
    """ Binds a global theme.
    Args:
        theme (Union[int, str]): Tag of the theme to apply globally.
    Returns:
        None
    """
    # Forward straight to the C extension.
    result = internal_dpg.bind_theme(theme, **kwargs)
    return result
def capture_next_item(callback : Callable, *, user_data: Any =None, **kwargs) -> None:
    """ Captures the next item.
    Args:
        callback (Callable): Callback invoked with the captured item.
        user_data (Any, optional): New in 1.3. Optional user data to send to the callback
    Returns:
        None
    """
    # Forward straight to the C extension.
    result = internal_dpg.capture_next_item(callback, user_data=user_data, **kwargs)
    return result
def clear_selected_links(node_editor : Union[int, str], **kwargs) -> None:
    """ Clears a node editor's selected links.
    Args:
        node_editor (Union[int, str]): Tag of the node editor.
    Returns:
        None
    """
    # Forward straight to the C extension.
    result = internal_dpg.clear_selected_links(node_editor, **kwargs)
    return result
def clear_selected_nodes(node_editor : Union[int, str], **kwargs) -> None:
    """ Clears a node editor's selected nodes.
    Args:
        node_editor (Union[int, str]): Tag of the node editor.
    Returns:
        None
    """
    # Forward straight to the C extension.
    result = internal_dpg.clear_selected_nodes(node_editor, **kwargs)
    return result
def create_context(**kwargs) -> None:
    """ Creates the Dear PyGui context.
    Returns:
        None
    """
    # Forward straight to the C extension.
    result = internal_dpg.create_context(**kwargs)
    return result
def create_fps_matrix(eye : Union[List[float], Tuple[float, ...]], pitch : float, yaw : float, **kwargs) -> Any:
    """ New in 1.1. Create a 'first person shooter' matrix.
    Args:
        eye (Union[List[float], Tuple[float, ...]]): eye position
        pitch (float): pitch angle in radians
        yaw (float): yaw angle in radians
    Returns:
        Any
    """
    # Matrix construction lives in the C extension.
    matrix = internal_dpg.create_fps_matrix(eye, pitch, yaw, **kwargs)
    return matrix
def create_lookat_matrix(eye : Union[List[float], Tuple[float, ...]], target : Union[List[float], Tuple[float, ...]], up : Union[List[float], Tuple[float, ...]], **kwargs) -> Any:
    """ New in 1.1. Creates a 'Look at matrix'.
    Args:
        eye (Union[List[float], Tuple[float, ...]]): eye position
        target (Union[List[float], Tuple[float, ...]]): position being looked at
        up (Union[List[float], Tuple[float, ...]]): up direction vector
    Returns:
        Any
    """
    # Matrix construction lives in the C extension.
    matrix = internal_dpg.create_lookat_matrix(eye, target, up, **kwargs)
    return matrix
def create_orthographic_matrix(left : float, right : float, bottom : float, top : float, zNear : float, zFar : float, **kwargs) -> Any:
    """ New in 1.1. Creates an orthographic matrix.
    Args:
        left (float): left clipping plane
        right (float): right clipping plane
        bottom (float): bottom clipping plane
        top (float): top clipping plane
        zNear (float): near clipping plane
        zFar (float): far clipping plane
    Returns:
        Any
    """
    # Matrix construction lives in the C extension.
    matrix = internal_dpg.create_orthographic_matrix(left, right, bottom, top, zNear, zFar, **kwargs)
    return matrix
def create_perspective_matrix(fov : float, aspect : float, zNear : float, zFar : float, **kwargs) -> Any:
    """ New in 1.1. Creates a perspective matrix.
    Args:
        fov (float): field of view, in radians
        aspect (float): aspect ratio (width/height)
        zNear (float): near clipping plane
        zFar (float): far clipping plane
    Returns:
        Any
    """
    # Matrix construction lives in the C extension.
    matrix = internal_dpg.create_perspective_matrix(fov, aspect, zNear, zFar, **kwargs)
    return matrix
def create_rotation_matrix(angle : float, axis : Union[List[float], Tuple[float, ...]], **kwargs) -> Any:
    """ New in 1.1. Creates a rotation matrix.
    Args:
        angle (float): angle to rotate (presumably radians, consistent with the other create_*_matrix helpers — TODO confirm)
        axis (Union[List[float], Tuple[float, ...]]): axis to rotate around
    Returns:
        Any
    """
    return internal_dpg.create_rotation_matrix(angle, axis, **kwargs)
def create_scale_matrix(scales : Union[List[float], Tuple[float, ...]], **kwargs) -> Any:
    """ New in 1.1. Creates a scale matrix.
    Args:
        scales (Union[List[float], Tuple[float, ...]]): scale values per axis
    Returns:
        Any
    """
    return internal_dpg.create_scale_matrix(scales, **kwargs)
def create_translation_matrix(translation : Union[List[float], Tuple[float, ...]], **kwargs) -> Any:
    """ New in 1.1. Creates a translation matrix.
    Args:
        translation (Union[List[float], Tuple[float, ...]]): translation vector
    Returns:
        Any
    """
    # Matrix construction lives in the C extension.
    matrix = internal_dpg.create_translation_matrix(translation, **kwargs)
    return matrix
def create_viewport(*, title: str ='Dear PyGui', small_icon: str ='', large_icon: str ='', width: int =1280, height: int =800, x_pos: int =100, y_pos: int =100, min_width: int =250, max_width: int =10000, min_height: int =250, max_height: int =10000, resizable: bool =True, vsync: bool =True, always_on_top: bool =False, decorated: bool =True, clear_color: Union[List[float], Tuple[float, ...]] =(0, 0, 0, 255), **kwargs) -> None:
    """ Creates a viewport. Viewports are required.
    Args:
        title (str, optional): Sets the title of the viewport.
        small_icon (str, optional): Sets the small icon that is found in the viewport's decorator bar. Must be ***.ico on windows and either ***.ico or ***.png on mac.
        large_icon (str, optional): Sets the large icon that is found in the task bar while the app is running. Must be ***.ico on windows and either ***.ico or ***.png on mac.
        width (int, optional): Sets the width of the drawable space on the viewport. Does not include the border.
        height (int, optional): Sets the height of the drawable space on the viewport. Does not include the border or decorator bar.
        x_pos (int, optional): Sets x position the viewport will be drawn in screen coordinates.
        y_pos (int, optional): Sets y position the viewport will be drawn in screen coordinates.
        min_width (int, optional): Applies a minimum limit to the width of the viewport.
        max_width (int, optional): Applies a maximum limit to the width of the viewport.
        min_height (int, optional): Applies a minimum limit to the height of the viewport.
        max_height (int, optional): Applies a maximum limit to the height of the viewport.
        resizable (bool, optional): Enables and disables user ability to resize the viewport.
        vsync (bool, optional): Enables and disables the render loop vsync limit. vsync frame value is set by refresh rate of display.
        always_on_top (bool, optional): Forces the viewport to always be drawn on top of all other viewports.
        decorated (bool, optional): Enables and disables the decorator bar at the top of the viewport.
        clear_color (Union[List[float], Tuple[float, ...]], optional): Sets the color of the back of the viewport.
    Returns:
        None
    """
    return internal_dpg.create_viewport(title=title, small_icon=small_icon, large_icon=large_icon, width=width, height=height, x_pos=x_pos, y_pos=y_pos, min_width=min_width, max_width=max_width, min_height=min_height, max_height=max_height, resizable=resizable, vsync=vsync, always_on_top=always_on_top, decorated=decorated, clear_color=clear_color, **kwargs)
def delete_item(item : Union[int, str], *, children_only: bool =False, slot: int =-1, **kwargs) -> None:
    """ Deletes an item..
    Args:
        item (Union[int, str]): Item to delete.
        children_only (bool, optional): Delete only the item's children, keeping the item itself.
        slot (int, optional): Restrict deletion to a particular child slot (-1 means all slots).
    Returns:
        None
    """
    # Forward straight to the C extension.
    result = internal_dpg.delete_item(item, children_only=children_only, slot=slot, **kwargs)
    return result
def destroy_context(**kwargs) -> None:
    """ Destroys the Dear PyGui context.
    Returns:
        None
    """
    # Forward straight to the C extension.
    result = internal_dpg.destroy_context(**kwargs)
    return result
def does_alias_exist(alias : str, **kwargs) -> bool:
    """ Checks if an alias exist.
    Args:
        alias (str): Alias to look up.
    Returns:
        bool
    """
    # Forward straight to the C extension.
    exists = internal_dpg.does_alias_exist(alias, **kwargs)
    return exists
def does_item_exist(item : Union[int, str], **kwargs) -> bool:
    """ Checks if an item exist..
    Args:
        item (Union[int, str]): Item tag to look up.
    Returns:
        bool
    """
    # Forward straight to the C extension.
    exists = internal_dpg.does_item_exist(item, **kwargs)
    return exists
def draw_arrow(p1 : Union[List[float], Tuple[float, ...]], p2 : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, color: Union[List[int], Tuple[int, ...]] =(255, 255, 255, 255), thickness: float =1.0, size: int =4, **kwargs) -> Union[int, str]:
    """ Adds an arrow.
    Args:
        p1 (Union[List[float], Tuple[float, ...]]): Arrow tip.
        p2 (Union[List[float], Tuple[float, ...]]): Arrow tail.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        color (Union[List[int], Tuple[int, ...]], optional):
        thickness (float, optional):
        size (int, optional):
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs.keys():
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        # pop (not just read) so the deprecated key is not also forwarded
        # through **kwargs, which would raise an unexpected-keyword error.
        tag = kwargs.pop('id')
    return internal_dpg.draw_arrow(p1, p2, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, show=show, color=color, thickness=thickness, size=size, **kwargs)
def draw_bezier_cubic(p1 : Union[List[float], Tuple[float, ...]], p2 : Union[List[float], Tuple[float, ...]], p3 : Union[List[float], Tuple[float, ...]], p4 : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, color: Union[List[int], Tuple[int, ...]] =(255, 255, 255, 255), thickness: float =1.0, segments: int =0, **kwargs) -> Union[int, str]:
    """ Adds a cubic bezier curve.
    Args:
        p1 (Union[List[float], Tuple[float, ...]]): First point in curve.
        p2 (Union[List[float], Tuple[float, ...]]): Second point in curve.
        p3 (Union[List[float], Tuple[float, ...]]): Third point in curve.
        p4 (Union[List[float], Tuple[float, ...]]): Fourth point in curve.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        color (Union[List[int], Tuple[int, ...]], optional):
        thickness (float, optional):
        segments (int, optional): Number of segments to approximate bezier curve.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs.keys():
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        # pop (not just read) so the deprecated key is not also forwarded
        # through **kwargs, which would raise an unexpected-keyword error.
        tag = kwargs.pop('id')
    return internal_dpg.draw_bezier_cubic(p1, p2, p3, p4, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, show=show, color=color, thickness=thickness, segments=segments, **kwargs)
def draw_bezier_quadratic(p1 : Union[List[float], Tuple[float, ...]], p2 : Union[List[float], Tuple[float, ...]], p3 : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, color: Union[List[int], Tuple[int, ...]] =(255, 255, 255, 255), thickness: float =1.0, segments: int =0, **kwargs) -> Union[int, str]:
    """ Adds a quadratic bezier curve.
    Args:
        p1 (Union[List[float], Tuple[float, ...]]): First point in curve.
        p2 (Union[List[float], Tuple[float, ...]]): Second point in curve.
        p3 (Union[List[float], Tuple[float, ...]]): Third point in curve.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        color (Union[List[int], Tuple[int, ...]], optional):
        thickness (float, optional):
        segments (int, optional): Number of segments to approximate bezier curve.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs.keys():
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        # pop (not just read) so the deprecated key is not also forwarded
        # through **kwargs, which would raise an unexpected-keyword error.
        tag = kwargs.pop('id')
    return internal_dpg.draw_bezier_quadratic(p1, p2, p3, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, show=show, color=color, thickness=thickness, segments=segments, **kwargs)
def draw_circle(center : Union[List[float], Tuple[float, ...]], radius : float, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, color: Union[List[int], Tuple[int, ...]] =(255, 255, 255, 255), fill: Union[List[int], Tuple[int, ...]] =(0, 0, 0, -255), thickness: float =1.0, segments: int =0, **kwargs) -> Union[int, str]:
    """ Adds a circle
    Args:
        center (Union[List[float], Tuple[float, ...]]): Center of the circle.
        radius (float): Radius of the circle.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        color (Union[List[int], Tuple[int, ...]], optional):
        fill (Union[List[int], Tuple[int, ...]], optional):
        thickness (float, optional):
        segments (int, optional): Number of segments to approximate circle.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs.keys():
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        # pop (not just read) so the deprecated key is not also forwarded
        # through **kwargs, which would raise an unexpected-keyword error.
        tag = kwargs.pop('id')
    return internal_dpg.draw_circle(center, radius, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, show=show, color=color, fill=fill, thickness=thickness, segments=segments, **kwargs)
def draw_ellipse(pmin : Union[List[float], Tuple[float, ...]], pmax : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, color: Union[List[int], Tuple[int, ...]] =(255, 255, 255, 255), fill: Union[List[int], Tuple[int, ...]] =(0, 0, 0, -255), thickness: float =1.0, segments: int =32, **kwargs) -> Union[int, str]:
    """ Adds an ellipse.
    Args:
        pmin (Union[List[float], Tuple[float, ...]]): Min point of bounding rectangle.
        pmax (Union[List[float], Tuple[float, ...]]): Max point of bounding rectangle.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        color (Union[List[int], Tuple[int, ...]], optional):
        fill (Union[List[int], Tuple[int, ...]], optional):
        thickness (float, optional):
        segments (int, optional): Number of segments to approximate the ellipse.
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs.keys():
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        # pop (not just read) so the deprecated key is not also forwarded
        # through **kwargs, which would raise an unexpected-keyword error.
        tag = kwargs.pop('id')
    return internal_dpg.draw_ellipse(pmin, pmax, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, show=show, color=color, fill=fill, thickness=thickness, segments=segments, **kwargs)
def draw_image(texture_tag : Union[int, str], pmin : Union[List[float], Tuple[float, ...]], pmax : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, uv_min: Union[List[float], Tuple[float, ...]] =(0.0, 0.0), uv_max: Union[List[float], Tuple[float, ...]] =(1.0, 1.0), color: Union[List[int], Tuple[int, ...]] =(255, 255, 255, 255), **kwargs) -> Union[int, str]:
    """ Adds an image (for a drawing).
    Args:
        texture_tag (Union[int, str]): Tag of the texture to draw.
        pmin (Union[List[float], Tuple[float, ...]]): Point of to start drawing texture.
        pmax (Union[List[float], Tuple[float, ...]]): Point to complete drawing texture.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        uv_min (Union[List[float], Tuple[float, ...]], optional): Normalized coordinates on texture that will be drawn.
        uv_max (Union[List[float], Tuple[float, ...]], optional): Normalized coordinates on texture that will be drawn.
        color (Union[List[int], Tuple[int, ...]], optional):
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs.keys():
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        # pop (not just read) so the deprecated key is not also forwarded
        # through **kwargs, which would raise an unexpected-keyword error.
        tag = kwargs.pop('id')
    return internal_dpg.draw_image(texture_tag, pmin, pmax, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, show=show, uv_min=uv_min, uv_max=uv_max, color=color, **kwargs)
def draw_image_quad(texture_tag : Union[int, str], p1 : Union[List[float], Tuple[float, ...]], p2 : Union[List[float], Tuple[float, ...]], p3 : Union[List[float], Tuple[float, ...]], p4 : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, uv1: Union[List[float], Tuple[float, ...]] =(0.0, 0.0), uv2: Union[List[float], Tuple[float, ...]] =(1.0, 0.0), uv3: Union[List[float], Tuple[float, ...]] =(1.0, 1.0), uv4: Union[List[float], Tuple[float, ...]] =(0.0, 1.0), color: Union[List[int], Tuple[int, ...]] =(255, 255, 255, 255), **kwargs) -> Union[int, str]:
    """ Adds an image (for a drawing).
    Args:
        texture_tag (Union[int, str]): Tag of the texture to draw.
        p1 (Union[List[float], Tuple[float, ...]]): First corner of the quad.
        p2 (Union[List[float], Tuple[float, ...]]): Second corner of the quad.
        p3 (Union[List[float], Tuple[float, ...]]): Third corner of the quad.
        p4 (Union[List[float], Tuple[float, ...]]): Fourth corner of the quad.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        uv1 (Union[List[float], Tuple[float, ...]], optional): Normalized coordinates on texture that will be drawn.
        uv2 (Union[List[float], Tuple[float, ...]], optional): Normalized coordinates on texture that will be drawn.
        uv3 (Union[List[float], Tuple[float, ...]], optional): Normalized coordinates on texture that will be drawn.
        uv4 (Union[List[float], Tuple[float, ...]], optional): Normalized coordinates on texture that will be drawn.
        color (Union[List[int], Tuple[int, ...]], optional):
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs.keys():
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        # pop (not just read) so the deprecated key is not also forwarded
        # through **kwargs, which would raise an unexpected-keyword error.
        tag = kwargs.pop('id')
    return internal_dpg.draw_image_quad(texture_tag, p1, p2, p3, p4, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, show=show, uv1=uv1, uv2=uv2, uv3=uv3, uv4=uv4, color=color, **kwargs)
def draw_line(p1 : Union[List[float], Tuple[float, ...]], p2 : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, color: Union[List[int], Tuple[int, ...]] =(255, 255, 255, 255), thickness: float =1.0, **kwargs) -> Union[int, str]:
    """ Adds a line.
    Args:
        p1 (Union[List[float], Tuple[float, ...]]): Start of line.
        p2 (Union[List[float], Tuple[float, ...]]): End of line.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        color (Union[List[int], Tuple[int, ...]], optional):
        thickness (float, optional):
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs.keys():
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        # pop (not just read) so the deprecated key is not also forwarded
        # through **kwargs, which would raise an unexpected-keyword error.
        tag = kwargs.pop('id')
    return internal_dpg.draw_line(p1, p2, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, show=show, color=color, thickness=thickness, **kwargs)
def draw_polygon(points : List[List[float]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, color: Union[List[int], Tuple[int, ...]] =(255, 255, 255, 255), fill: Union[List[int], Tuple[int, ...]] =(0, 0, 0, -255), thickness: float =1.0, **kwargs) -> Union[int, str]:
    """ Adds a polygon.
    Args:
        points (List[List[float]]): Vertices of the polygon.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        color (Union[List[int], Tuple[int, ...]], optional):
        fill (Union[List[int], Tuple[int, ...]], optional):
        thickness (float, optional):
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs.keys():
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        # pop (not just read) so the deprecated key is not also forwarded
        # through **kwargs, which would raise an unexpected-keyword error.
        tag = kwargs.pop('id')
    return internal_dpg.draw_polygon(points, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, show=show, color=color, fill=fill, thickness=thickness, **kwargs)
def draw_polyline(points : List[List[float]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, closed: bool =False, color: Union[List[int], Tuple[int, ...]] =(255, 255, 255, 255), thickness: float =1.0, **kwargs) -> Union[int, str]:
    """ Adds a polyline.
    Args:
        points (List[List[float]]): Vertices of the polyline.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        closed (bool, optional): Will close the polyline by returning to the first point.
        color (Union[List[int], Tuple[int, ...]], optional):
        thickness (float, optional):
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs.keys():
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        # pop (not just read) so the deprecated key is not also forwarded
        # through **kwargs, which would raise an unexpected-keyword error.
        tag = kwargs.pop('id')
    return internal_dpg.draw_polyline(points, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, show=show, closed=closed, color=color, thickness=thickness, **kwargs)
def draw_quad(p1 : Union[List[float], Tuple[float, ...]], p2 : Union[List[float], Tuple[float, ...]], p3 : Union[List[float], Tuple[float, ...]], p4 : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, color: Union[List[int], Tuple[int, ...]] =(255, 255, 255, 255), fill: Union[List[int], Tuple[int, ...]] =(0, 0, 0, -255), thickness: float =1.0, **kwargs) -> Union[int, str]:
    """ Adds a quad.
    Args:
        p1 (Union[List[float], Tuple[float, ...]]): First corner of the quad.
        p2 (Union[List[float], Tuple[float, ...]]): Second corner of the quad.
        p3 (Union[List[float], Tuple[float, ...]]): Third corner of the quad.
        p4 (Union[List[float], Tuple[float, ...]]): Fourth corner of the quad.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        color (Union[List[int], Tuple[int, ...]], optional):
        fill (Union[List[int], Tuple[int, ...]], optional):
        thickness (float, optional):
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs.keys():
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        # pop (not just read) so the deprecated key is not also forwarded
        # through **kwargs, which would raise an unexpected-keyword error.
        tag = kwargs.pop('id')
    return internal_dpg.draw_quad(p1, p2, p3, p4, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, show=show, color=color, fill=fill, thickness=thickness, **kwargs)
def draw_rectangle(pmin : Union[List[float], Tuple[float, ...]], pmax : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, color: Union[List[int], Tuple[int, ...]] =(255, 255, 255, 255), color_upper_left: Union[List[int], Tuple[int, ...]] =(255, 255, 255, 255), color_upper_right: Union[List[int], Tuple[int, ...]] =(255, 255, 255, 255), color_bottom_right: Union[List[int], Tuple[int, ...]] =(255, 255, 255, 255), color_bottom_left: Union[List[int], Tuple[int, ...]] =(255, 255, 255, 255), fill: Union[List[int], Tuple[int, ...]] =(0, 0, 0, -255), multicolor: bool =False, rounding: float =0.0, thickness: float =1.0, **kwargs) -> Union[int, str]:
    """ Adds a rectangle.
    Args:
        pmin (Union[List[float], Tuple[float, ...]]): Min point of bounding rectangle.
        pmax (Union[List[float], Tuple[float, ...]]): Max point of bounding rectangle.
        label (str, optional): Overrides 'name' as label.
        user_data (Any, optional): User data for callbacks
        use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
        tag (Union[int, str], optional): Unique id used to programmatically refer to the item. If label is unused this will be the label.
        parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
        before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
        show (bool, optional): Attempt to render widget.
        color (Union[List[int], Tuple[int, ...]], optional):
        color_upper_left (Union[List[int], Tuple[int, ...]], optional): 'multicolor' must be set to 'True'
        color_upper_right (Union[List[int], Tuple[int, ...]], optional): 'multicolor' must be set to 'True'
        color_bottom_right (Union[List[int], Tuple[int, ...]], optional): 'multicolor' must be set to 'True'
        color_bottom_left (Union[List[int], Tuple[int, ...]], optional): 'multicolor' must be set to 'True'
        fill (Union[List[int], Tuple[int, ...]], optional):
        multicolor (bool, optional):
        rounding (float, optional): Number of pixels of the radius that will round the corners of the rectangle. Note: doesn't work with multicolor
        thickness (float, optional):
        id (Union[int, str], optional): (deprecated)
    Returns:
        Union[int, str]
    """
    if 'id' in kwargs.keys():
        warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
        # pop (not just read) so the deprecated key is not also forwarded
        # through **kwargs, which would raise an unexpected-keyword error.
        tag = kwargs.pop('id')
    return internal_dpg.draw_rectangle(pmin, pmax, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, show=show, color=color, color_upper_left=color_upper_left, color_upper_right=color_upper_right, color_bottom_right=color_bottom_right, color_bottom_left=color_bottom_left, fill=fill, multicolor=multicolor, rounding=rounding, thickness=thickness, **kwargs)
def draw_text(pos : Union[List[float], Tuple[float, ...]], text : str, *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, color: Union[List[int], Tuple[int, ...]] =(255, 255, 255, 255), size: float =10.0, **kwargs) -> Union[int, str]:
	 """	 Adds text (drawlist).

	Args:
		pos (Union[List[float], Tuple[float, ...]]): Top left point of bounding text rectangle.
		text (str): Text to draw.
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		show (bool, optional): Attempt to render widget.
		color (Union[List[int], Tuple[int, ...]], optional):
		size (float, optional):
		id (Union[int, str], optional): (deprecated)
	Returns:
		Union[int, str]
	"""

	 if 'id' in kwargs.keys():
		 warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		 # pop (not just read) the deprecated keyword so it is not forwarded a
		 # second time to the internal call through **kwargs alongside 'tag'
		 tag=kwargs.pop('id')

	 return internal_dpg.draw_text(pos, text, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, show=show, color=color, size=size, **kwargs)
def draw_triangle(p1 : Union[List[float], Tuple[float, ...]], p2 : Union[List[float], Tuple[float, ...]], p3 : Union[List[float], Tuple[float, ...]], *, label: str =None, user_data: Any =None, use_internal_label: bool =True, tag: Union[int, str] =0, parent: Union[int, str] =0, before: Union[int, str] =0, show: bool =True, color: Union[List[int], Tuple[int, ...]] =(255, 255, 255, 255), fill: Union[List[int], Tuple[int, ...]] =(0, 0, 0, -255), thickness: float =1.0, **kwargs) -> Union[int, str]:
	 """	 Adds a triangle.

	Args:
		p1 (Union[List[float], Tuple[float, ...]]):
		p2 (Union[List[float], Tuple[float, ...]]):
		p3 (Union[List[float], Tuple[float, ...]]):
		label (str, optional): Overrides 'name' as label.
		user_data (Any, optional): User data for callbacks
		use_internal_label (bool, optional): Use generated internal label instead of user specified (appends ### uuid).
		tag (Union[int, str], optional): Unique id used to programmatically refer to the item.If label is unused this will be the label.
		parent (Union[int, str], optional): Parent to add this item to. (runtime adding)
		before (Union[int, str], optional): This item will be displayed before the specified item in the parent.
		show (bool, optional): Attempt to render widget.
		color (Union[List[int], Tuple[int, ...]], optional):
		fill (Union[List[int], Tuple[int, ...]], optional):
		thickness (float, optional):
		id (Union[int, str], optional): (deprecated)
	Returns:
		Union[int, str]
	"""

	 if 'id' in kwargs.keys():
		 warnings.warn('id keyword renamed to tag', DeprecationWarning, 2)
		 # pop (not just read) the deprecated keyword so it is not forwarded a
		 # second time to the internal call through **kwargs alongside 'tag'
		 tag=kwargs.pop('id')

	 return internal_dpg.draw_triangle(p1, p2, p3, label=label, user_data=user_data, use_internal_label=use_internal_label, tag=tag, parent=parent, before=before, show=show, color=color, fill=fill, thickness=thickness, **kwargs)
def empty_container_stack(**kwargs) -> None:
	 """	 Empties the container stack.

	Args:
	Returns:
		None
	"""

	 return internal_dpg.empty_container_stack(**kwargs)
def fit_axis_data(axis : Union[int, str], **kwargs) -> None:
""" Sets the axis boundaries max/min in the data series currently on the plot.
Args:
axis (Union[int, str]):
Returns:
None
"""
return internal_dpg.fit_axis_data(axis, **kwargs)
def focus_item(item : Union[int, str], **kwargs) -> None:
""" Focuses an item.
Args:
item (Union[int, str]):
Returns:
None
"""
return internal_dpg.focus_item(item, **kwargs)
def generate_uuid(**kwargs) -> Union[int, str]:
""" Generate a new UUID.
Args:
Returns:
Union[int, str]
"""
return internal_dpg.generate_uuid(**kwargs)
def get_active_window(**kwargs) -> Union[int, str]:
""" Returns the active window.
Args:
Returns:
Union[int, str]
"""
return internal_dpg.get_active_window(**kwargs)
def get_alias_id(alias : str, **kwargs) -> Union[int, str]:
""" Returns the ID associated with an alias.
Args:
alias (str):
Returns:
Union[int, str]
"""
return internal_dpg.get_alias_id(alias, **kwargs)
def get_aliases(**kwargs) -> Union[List[str], Tuple[str, ...]]:
""" Returns all aliases.
Args:
Returns:
Union[List[str], Tuple[str, ...]]
"""
return internal_dpg.get_aliases(**kwargs)
def get_all_items(**kwargs) -> Union[List[int], Tuple[int, ...]]:
""" Returns all items.
Args:
Returns:
Union[List[int], Tuple[int, ...]]
"""
return internal_dpg.get_all_items(**kwargs)
def get_app_configuration(**kwargs) -> dict:
""" Returns app configuration.
Args:
Returns:
dict
"""
return internal_dpg.get_app_configuration(**kwargs)
def get_axis_limits(axis : Union[int, str], **kwargs) -> Union[List[float], Tuple[float, ...]]:
""" Get the specified axis limits.
Args:
axis (Union[int, str]):
Returns:
Union[List[float], Tuple[float, ...]]
"""
return internal_dpg.get_axis_limits(axis, **kwargs)
def get_callback_queue(**kwargs) -> Any:
""" New in 1.2. Returns and clears callback queue.
Args:
Returns:
Any
"""
return internal_dpg.get_callback_queue(**kwargs)
def get_clipboard_text(**kwargs) -> str:
""" New in 1.3. Gets the clipboard text.
Args:
Returns:
str
"""
return internal_dpg.get_clipboard_text(**kwargs)
def get_colormap_color(colormap : Union[int, str], index : int, **kwargs) -> Union[List[int], Tuple[int, ...]]:
""" Returns a color from a colormap given an index >= 0. (ex. 0 will be the first color in the color list of the color map) Modulo will be performed against the number of items in the color list.
Args:
colormap (Union[int, str]): The colormap tag. This should come from a colormap that was added to a colormap registry. Built in color maps are accessible through their corresponding constants mvPlotColormap_Twilight, mvPlotColormap_***
index (int): Desired position of the color in the colors list value of the colormap being quiered
Returns:
Union[List[int], Tuple[int, ...]]
"""
return internal_dpg.get_colormap_color(colormap, index, **kwargs)
def get_delta_time(**kwargs) -> float:
""" Returns time since last frame.
Args:
Returns:
float
"""
return internal_dpg.get_delta_time(**kwargs)
def get_drawing_mouse_pos(**kwargs) -> Union[List[int], Tuple[int, ...]]:
""" Returns mouse position in drawing.
Args:
Returns:
Union[List[int], Tuple[int, ...]]
"""
return internal_dpg.get_drawing_mouse_pos(**kwargs)
def get_file_dialog_info(file_dialog : Union[int, str], **kwargs) -> dict:
""" Returns information related to the file dialog. Typically used while the file dialog is in use to query data about the state or info related to the file dialog.
Args:
file_dialog (Union[int, str]):
Returns:
dict
"""
return internal_dpg.get_file_dialog_info(file_dialog, **kwargs)
def get_frame_count(**kwargs) -> int:
""" Returns frame count.
Args:
Returns:
int
"""
return internal_dpg.get_frame_count(**kwargs)
def get_frame_rate(**kwargs) -> float:
""" Returns the average frame rate across 120 frames.
Args:
Returns:
float
"""
return internal_dpg.get_frame_rate(**kwargs)
def get_global_font_scale(**kwargs) -> float:
""" Returns global font scale.
Args:
Returns:
float
"""
return internal_dpg.get_global_font_scale(**kwargs)
def get_item_alias(item : Union[int, str], **kwargs) -> str:
""" Returns an item's alias.
Args:
item (Union[int, str]):
Returns:
str
"""
return internal_dpg.get_item_alias(item, **kwargs)
def get_item_configuration(item : Union[int, str], **kwargs) -> dict:
""" Returns an item's configuration.
Args:
item (Union[int, str]):
Returns:
dict
"""
return internal_dpg.get_item_configuration(item, **kwargs)
def get_item_info(item : Union[int, str], **kwargs) -> dict:
""" Returns an item's information.
Args:
item (Union[int, str]):
Returns:
dict
"""
return internal_dpg.get_item_info(item, **kwargs)
def get_item_state(item : Union[int, str], **kwargs) -> dict:
""" Returns an item's state.
Args:
item (Union[int, str]):
Returns:
dict
"""
return internal_dpg.get_item_state(item, **kwargs)
def get_item_types(**kwargs) -> dict:
""" Returns an item types.
Args:
Returns:
dict
"""
return internal_dpg.get_item_types(**kwargs)
def get_mouse_drag_delta(**kwargs) -> float:
""" Returns mouse drag delta.
Args:
Returns:
float
"""
return internal_dpg.get_mouse_drag_delta(**kwargs)
def get_mouse_pos(*, local: bool =True, **kwargs) -> Union[List[int], Tuple[int, ...]]:
""" Returns mouse position.
Args:
local (bool, optional):
Returns:
Union[List[int], Tuple[int, ...]]
"""
return internal_dpg.get_mouse_pos(local=local, **kwargs)
def get_plot_mouse_pos(**kwargs) -> Union[List[int], Tuple[int, ...]]:
""" Returns mouse position in plot.
Args:
Returns:
Union[List[int], Tuple[int, ...]]
"""
return internal_dpg.get_plot_mouse_pos(**kwargs)
def get_plot_query_area(plot : Union[int, str], **kwargs) -> Union[List[float], Tuple[float, ...]]:
""" Returns the last/current query area of the plot. (Requires plot 'query' kwarg to be enabled)
Args:
plot (Union[int, str]):
Returns:
Union[List[float], Tuple[float, ...]]
"""
return internal_dpg.get_plot_query_area(plot, **kwargs)
def get_selected_links(node_editor : Union[int, str], **kwargs) -> List[List[str]]:
""" Returns a node editor's selected links.
Args:
node_editor (Union[int, str]):
Returns:
List[List[str]]
"""
return internal_dpg.get_selected_links(node_editor, **kwargs)
def get_selected_nodes(node_editor : Union[int, str], **kwargs) -> Union[List[int], Tuple[int, ...]]:
""" Returns a node editor's selected nodes.
Args:
node_editor (Union[int, str]):
Returns:
Union[List[int], Tuple[int, ...]]
"""
return internal_dpg.get_selected_nodes(node_editor, **kwargs)
def get_text_size(text : str, *, wrap_width: float =-1.0, font: Union[int, str] =0, **kwargs) -> Union[List[float], Tuple[float, ...]]:
""" Returns width/height of text with specified font (must occur after 1st frame).
Args:
text (str):
wrap_width (float, optional): Wrap width to use (-1.0 turns wrap off).
font (Union[int, str], optional): Font to use.
Returns:
Union[List[float], Tuple[float, ...]]
"""
return internal_dpg.get_text_size(text, wrap_width=wrap_width, font=font, **kwargs)
def get_total_time(**kwargs) -> float:
""" Returns total time since Dear PyGui has started.
Args:
Returns:
float
"""
return internal_dpg.get_total_time(**kwargs)
def get_value(item : Union[int, str], **kwargs) -> Any:
""" Returns an item's value.
Args:
item (Union[int, str]):
Returns:
Any
"""
return internal_dpg.get_value(item, **kwargs)
def get_values(items : Union[List[int], Tuple[int, ...]], **kwargs) -> Any:
""" Returns values of a list of items.
Args:
items (Union[List[int], Tuple[int, ...]]):
Returns:
Any
"""
return internal_dpg.get_values(items, **kwargs)
def get_viewport_configuration(item : Union[int, str], **kwargs) -> dict:
""" Returns a viewport's configuration.
Args:
item (Union[int, str]):
Returns:
dict
"""
return internal_dpg.get_viewport_configuration(item, **kwargs)
def get_windows(**kwargs) -> Union[List[int], Tuple[int, ...]]:
""" Returns all windows.
Args:
Returns:
Union[List[int], Tuple[int, ...]]
"""
return internal_dpg.get_windows(**kwargs)
def get_x_scroll(item : Union[int, str], **kwargs) -> float:
""" Undocumented
Args:
item (Union[int, str]):
Returns:
float
"""
return internal_dpg.get_x_scroll(item, **kwargs)
def get_x_scroll_max(item : Union[int, str], **kwargs) -> float:
""" Undocumented
Args:
item (Union[int, str]):
Returns:
float
"""
return internal_dpg.get_x_scroll_max(item, **kwargs)
def get_y_scroll(item : Union[int, str], **kwargs) -> float:
""" Undocumented
Args:
item (Union[int, str]):
Returns:
float
"""
return internal_dpg.get_y_scroll(item, **kwargs)
def get_y_scroll_max(item : Union[int, str], **kwargs) -> float:
""" Undocumented
Args:
item (Union[int, str]):
Returns:
float
"""
return internal_dpg.get_y_scroll_max(item, **kwargs)
def highlight_table_cell(table : Union[int, str], row : int, column : int, color : Union[List[int], Tuple[int, ...]], **kwargs) -> None:
""" Highlight specified table cell.
Args:
table (Union[int, str]):
row (int):
column (int):
color (Union[List[int], Tuple[int, ...]]):
Returns:
None
"""
return internal_dpg.highlight_table_cell(table, row, column, color, **kwargs)
def highlight_table_column(table : Union[int, str], column : int, color : Union[List[int], Tuple[int, ...]], **kwargs) -> None:
""" Highlight specified table column.
Args:
table (Union[int, str]):
column (int):
color (Union[List[int], Tuple[int, ...]]):
Returns:
None
"""
return internal_dpg.highlight_table_column(table, column, color, **kwargs)
def highlight_table_row(table : Union[int, str], row : int, color : Union[List[int], Tuple[int, ...]], **kwargs) -> None:
""" Highlight specified table row.
Args:
table (Union[int, str]):
row (int):
color (Union[List[int], Tuple[int, ...]]):
Returns:
None
"""
return internal_dpg.highlight_table_row(table, row, color, **kwargs)
def is_dearpygui_running(**kwargs) -> bool:
""" Checks if Dear PyGui is running
Args:
Returns:
bool
"""
return internal_dpg.is_dearpygui_running(**kwargs)
def is_key_down(key : int, **kwargs) -> bool:
""" Checks if key is down.
Args:
key (int):
Returns:
bool
"""
return internal_dpg.is_key_down(key, **kwargs)
def is_key_pressed(key : int, **kwargs) -> bool:
""" Checks if key is pressed.
Args:
key (int):
Returns:
bool
"""
return internal_dpg.is_key_pressed(key, **kwargs)
def is_key_released(key : int, **kwargs) -> bool:
""" Checks if key is released.
Args:
key (int):
Returns:
bool
"""
return internal_dpg.is_key_released(key, **kwargs)
def is_mouse_button_clicked(button : int, **kwargs) -> bool:
""" Checks if mouse button is clicked.
Args:
button (int):
Returns:
bool
"""
return internal_dpg.is_mouse_button_clicked(button, **kwargs)
def is_mouse_button_double_clicked(button : int, **kwargs) -> bool:
""" Checks if mouse button is double clicked.
Args:
button (int):
Returns:
bool
"""
return internal_dpg.is_mouse_button_double_clicked(button, **kwargs)
def is_mouse_button_down(button : int, **kwargs) -> bool:
""" Checks if mouse button is down.
Args:
button (int):
Returns:
bool
"""
return internal_dpg.is_mouse_button_down(button, **kwargs)
def is_mouse_button_dragging(button : int, threshold : float, **kwargs) -> bool:
""" Checks if mouse button is down and dragging.
Args:
button (int):
threshold (float):
Returns:
bool
"""
return internal_dpg.is_mouse_button_dragging(button, threshold, **kwargs)
def is_mouse_button_released(button : int, **kwargs) -> bool:
""" Checks if mouse button is released.
Args:
button (int):
Returns:
bool
"""
return internal_dpg.is_mouse_button_released(button, **kwargs)
def is_plot_queried(plot : Union[int, str], **kwargs) -> bool:
""" Returns true if the plot is currently being queried. (Requires plot 'query' kwarg to be enabled)
Args:
plot (Union[int, str]):
Returns:
bool
"""
return internal_dpg.is_plot_queried(plot, **kwargs)
def is_table_cell_highlighted(table : Union[int, str], row : int, column : int, **kwargs) -> bool:
""" Checks if a table cell is highlighted.
Args:
table (Union[int, str]):
row (int):
column (int):
Returns:
bool
"""
return internal_dpg.is_table_cell_highlighted(table, row, column, **kwargs)
def is_table_column_highlighted(table : Union[int, str], column : int, **kwargs) -> bool:
""" Checks if a table column is highlighted.
Args:
table (Union[int, str]):
column (int):
Returns:
bool
"""
return internal_dpg.is_table_column_highlighted(table, column, **kwargs)
def is_table_row_highlighted(table : Union[int, str], row : int, **kwargs) -> bool:
""" Checks if a table row is highlighted.
Args:
table (Union[int, str]):
row (int):
Returns:
bool
"""
return internal_dpg.is_table_row_highlighted(table, row, **kwargs)
def is_viewport_ok(**kwargs) -> bool:
""" Checks if a viewport has been created and shown.
Args:
Returns:
bool
"""
return internal_dpg.is_viewport_ok(**kwargs)
def last_container(**kwargs) -> Union[int, str]:
""" Returns the last container item added.
Args:
Returns:
Union[int, str]
"""
return internal_dpg.last_container(**kwargs)
def last_item(**kwargs) -> Union[int, str]:
""" Returns the last item added.
Args:
Returns:
Union[int, str]
"""
return internal_dpg.last_item(**kwargs)
def last_root(**kwargs) -> Union[int, str]:
""" Returns the last root added (registry or window).
Args:
Returns:
Union[int, str]
"""
return internal_dpg.last_root(**kwargs)
def load_image(file : str, *, gamma: float =1.0, gamma_scale_factor: float =1.0, **kwargs) -> Any:
	 """	 Loads an image. Returns width, height, channels, mvBuffer

	Args:
		file (str):
		gamma (float, optional): Gamma correction factor. (default is 1.0 to avoid automatic gamma correction on loading.)
		gamma_scale_factor (float, optional): Gamma scale factor.
	Returns:
		Any
	"""

	 return internal_dpg.load_image(file, gamma=gamma, gamma_scale_factor=gamma_scale_factor, **kwargs)
def lock_mutex(**kwargs) -> None:
""" Locks render thread mutex.
Args:
Returns:
None
"""
return internal_dpg.lock_mutex(**kwargs)
def maximize_viewport(**kwargs) -> None:
""" Maximizes the viewport.
Args:
Returns:
None
"""
return internal_dpg.maximize_viewport(**kwargs)
def minimize_viewport(**kwargs) -> None:
""" Minimizes a viewport.
Args:
Returns:
None
"""
return internal_dpg.minimize_viewport(**kwargs)
def move_item(item : Union[int, str], *, parent: Union[int, str] =0, before: Union[int, str] =0, **kwargs) -> None:
""" Moves an item to a new location.
Args:
item (Union[int, str]):
parent (Union[int, str], optional):
before (Union[int, str], optional):
Returns:
None
"""
return internal_dpg.move_item(item, parent=parent, before=before, **kwargs)
def move_item_down(item : Union[int, str], **kwargs) -> None:
""" Moves an item down.
Args:
item (Union[int, str]):
Returns:
None
"""
return internal_dpg.move_item_down(item, **kwargs)
def move_item_up(item : Union[int, str], **kwargs) -> None:
""" Moves an item up.
Args:
item (Union[int, str]):
Returns:
None
"""
return internal_dpg.move_item_up(item, **kwargs)
def pop_container_stack(**kwargs) -> Union[int, str]:
""" Pops the top item off the parent stack and return its ID.
Args:
Returns:
Union[int, str]
"""
return internal_dpg.pop_container_stack(**kwargs)
def push_container_stack(item : Union[int, str], **kwargs) -> bool:
""" Pushes an item onto the container stack.
Args:
item (Union[int, str]):
Returns:
bool
"""
return internal_dpg.push_container_stack(item, **kwargs)
def remove_alias(alias : str, **kwargs) -> None:
""" Removes an alias.
Args:
alias (str):
Returns:
None
"""
return internal_dpg.remove_alias(alias, **kwargs)
def render_dearpygui_frame(**kwargs) -> None:
""" Render a single Dear PyGui frame.
Args:
Returns:
None
"""
return internal_dpg.render_dearpygui_frame(**kwargs)
def reorder_items(container : Union[int, str], slot : int, new_order : Union[List[int], Tuple[int, ...]], **kwargs) -> None:
""" Reorders an item's children.
Args:
container (Union[int, str]):
slot (int):
new_order (Union[List[int], Tuple[int, ...]]):
Returns:
None
"""
return internal_dpg.reorder_items(container, slot, new_order, **kwargs)
def reset_axis_ticks(axis : Union[int, str], **kwargs) -> None:
""" Removes the manually set axis ticks and applies the default axis ticks
Args:
axis (Union[int, str]):
Returns:
None
"""
return internal_dpg.reset_axis_ticks(axis, **kwargs)
def reset_pos(item : Union[int, str], **kwargs) -> None:
""" Resets an item's position after using 'set_item_pos'.
Args:
item (Union[int, str]):
Returns:
None
"""
return internal_dpg.reset_pos(item, **kwargs)
def sample_colormap(colormap : Union[int, str], t : float, **kwargs) -> Union[List[int], Tuple[int, ...]]:
""" Returns a color from a colormap given t between 0.0-1.0.
Args:
colormap (Union[int, str]): The colormap tag. This should come from a colormap that was added to a colormap registry. Built in color maps are accessible through their corresponding constants mvPlotColormap_Twilight, mvPlotColormap_***
t (float): Value of the colormap to sample between 0.0-1.0
Returns:
Union[List[int], Tuple[int, ...]]
"""
return internal_dpg.sample_colormap(colormap, t, **kwargs)
def save_init_file(file : str, **kwargs) -> None:
""" Save dpg.ini file.
Args:
file (str):
Returns:
None
"""
return internal_dpg.save_init_file(file, **kwargs)
def set_axis_limits(axis : Union[int, str], ymin : float, ymax : float, **kwargs) -> None:
	 """	 Sets limits on the axis for pan and zoom.

	Args:
		axis (Union[int, str]):
		ymin (float): Lower axis limit. NOTE(review): the 'y' prefix appears historical — the limits presumably apply to whichever axis item is passed (x or y); parameter names kept for backward compatibility.
		ymax (float): Upper axis limit.
	Returns:
		None
	"""

	 return internal_dpg.set_axis_limits(axis, ymin, ymax, **kwargs)
def set_axis_limits_auto(axis : Union[int, str], **kwargs) -> None:
""" Removes all limits on specified axis.
Args:
axis (Union[int, str]):
Returns:
None
"""
return internal_dpg.set_axis_limits_auto(axis, **kwargs)
def set_axis_ticks(axis : Union[int, str], label_pairs : Any, **kwargs) -> None:
""" Replaces axis ticks with 'label_pairs' argument.
Args:
axis (Union[int, str]):
label_pairs (Any): Tuples of label and value in the form '((label, axis_value), (label, axis_value), ...)'
Returns:
None
"""
return internal_dpg.set_axis_ticks(axis, label_pairs, **kwargs)
def set_clip_space(item : Union[int, str], top_left_x : float, top_left_y : float, width : float, height : float, min_depth : float, max_depth : float, **kwargs) -> None:
	 """	 New in 1.1. Set the clip space for depth clipping and 'viewport' transformation.

	Args:
		item (Union[int, str]): draw layer to set clip space
		top_left_x (float): x position of the top-left corner of the clip space
		top_left_y (float): y position of the top-left corner of the clip space
		width (float): width of the clip space
		height (float): height of the clip space
		min_depth (float): minimum depth value used for depth clipping
		max_depth (float): maximum depth value used for depth clipping
	Returns:
		None
	"""

	 return internal_dpg.set_clip_space(item, top_left_x, top_left_y, width, height, min_depth, max_depth, **kwargs)
def set_clipboard_text(text : str, **kwargs) -> None:
""" New in 1.3. Sets the clipboard text.
Args:
text (str):
Returns:
None
"""
return internal_dpg.set_clipboard_text(text, **kwargs)
def set_exit_callback(callback : Callable, *, user_data: Any =None, **kwargs) -> str:
""" Sets a callback to run on last frame.
Args:
callback (Callable):
user_data (Any, optional): New in 1.3. Optional user data to send to the callback
Returns:
str
"""
return internal_dpg.set_exit_callback(callback, user_data=user_data, **kwargs)
def set_frame_callback(frame : int, callback : Callable, *, user_data: Any =None, **kwargs) -> str:
""" Sets a callback to run on first frame.
Args:
frame (int):
callback (Callable):
user_data (Any, optional): New in 1.3. Optional user data to send to the callback
Returns:
str
"""
return internal_dpg.set_frame_callback(frame, callback, user_data=user_data, **kwargs)
def set_global_font_scale(scale : float, **kwargs) -> None:
""" Sets global font scale.
Args:
scale (float):
Returns:
None
"""
return internal_dpg.set_global_font_scale(scale, **kwargs)
def set_item_alias(item : Union[int, str], alias : str, **kwargs) -> None:
""" Sets an item's alias.
Args:
item (Union[int, str]):
alias (str):
Returns:
None
"""
return internal_dpg.set_item_alias(item, alias, **kwargs)
def set_item_children(item : Union[int, str], source : Union[int, str], slot : int, **kwargs) -> None:
""" Sets an item's children.
Args:
item (Union[int, str]):
source (Union[int, str]):
slot (int):
Returns:
None
"""
return internal_dpg.set_item_children(item, source, slot, **kwargs)
def set_primary_window(window : Union[int, str], value : bool, **kwargs) -> None:
""" Sets the primary window.
Args:
window (Union[int, str]):
value (bool):
Returns:
None
"""
return internal_dpg.set_primary_window(window, value, **kwargs)
def set_table_row_color(table : Union[int, str], row : int, color : Union[List[int], Tuple[int, ...]], **kwargs) -> None:
""" Set table row color.
Args:
table (Union[int, str]):
row (int):
color (Union[List[int], Tuple[int, ...]]):
Returns:
None
"""
return internal_dpg.set_table_row_color(table, row, color, **kwargs)
def set_value(item : Union[int, str], value : Any, **kwargs) -> None:
	 """	 Sets an item's value.

	Args:
		item (Union[int, str]):
		value (Any):
	Returns:
		None
	"""

	 return internal_dpg.set_value(item, value, **kwargs)
def set_viewport_resize_callback(callback : Callable, *, user_data: Any =None, **kwargs) -> str:
""" Sets a callback to run on viewport resize.
Args:
callback (Callable):
user_data (Any, optional): New in 1.3. Optional user data to send to the callback
Returns:
str
"""
return internal_dpg.set_viewport_resize_callback(callback, user_data=user_data, **kwargs)
def set_x_scroll(item : Union[int, str], value : float, **kwargs) -> None:
""" Undocumented
Args:
item (Union[int, str]):
value (float):
Returns:
None
"""
return internal_dpg.set_x_scroll(item, value, **kwargs)
def set_y_scroll(item : Union[int, str], value : float, **kwargs) -> None:
""" Undocumented
Args:
item (Union[int, str]):
value (float):
Returns:
None
"""
return internal_dpg.set_y_scroll(item, value, **kwargs)
def setup_dearpygui(**kwargs) -> None:
	 """	 Sets up Dear PyGui

	Args:
		viewport (Union[int, str], optional): (deprecated)
	Returns:
		None
	"""

	 # The 'viewport' keyword was removed from the API: warn once, then strip
	 # it so it is not forwarded to the internal call.
	 if 'viewport' in kwargs:
		 warnings.warn('viewport keyword removed', DeprecationWarning, 2)
		 del kwargs['viewport']

	 return internal_dpg.setup_dearpygui(**kwargs)
def show_imgui_demo(**kwargs) -> None:
""" Shows the imgui demo.
Args:
Returns:
None
"""
return internal_dpg.show_imgui_demo(**kwargs)
def show_implot_demo(**kwargs) -> None:
""" Shows the implot demo.
Args:
Returns:
None
"""
return internal_dpg.show_implot_demo(**kwargs)
def show_item_debug(item : Union[int, str], **kwargs) -> None:
""" Shows an item's debug window
Args:
item (Union[int, str]):
Returns:
None
"""
return internal_dpg.show_item_debug(item, **kwargs)
def show_tool(tool : Union[int, str], **kwargs) -> str:
""" Shows a built in tool.
Args:
tool (Union[int, str]):
Returns:
str
"""
return internal_dpg.show_tool(tool, **kwargs)
def show_viewport(*, minimized: bool =False, maximized: bool =False, **kwargs) -> None:
	 """	 Shows the main viewport.

	Args:
		minimized (bool, optional): Sets the state of the viewport to minimized
		maximized (bool, optional): Sets the state of the viewport to maximized
		viewport (Union[int, str], optional): (deprecated)
	Returns:
		None
	"""

	 # The 'viewport' keyword was removed from the API: warn once, then strip
	 # it so it is not forwarded to the internal call.
	 if 'viewport' in kwargs:
		 warnings.warn('viewport keyword removed', DeprecationWarning, 2)
		 del kwargs['viewport']

	 return internal_dpg.show_viewport(minimized=minimized, maximized=maximized, **kwargs)
def split_frame(*, delay: int =32, **kwargs) -> None:
""" Waits one frame.
Args:
delay (int, optional): Minimal delay in in milliseconds
Returns:
None
"""
return internal_dpg.split_frame(delay=delay, **kwargs)
def stop_dearpygui(**kwargs) -> None:
""" Stops Dear PyGui
Args:
Returns:
None
"""
return internal_dpg.stop_dearpygui(**kwargs)
def toggle_viewport_fullscreen(**kwargs) -> None:
	 """	 Toggle viewport fullscreen mode.

	Args:
	Returns:
		None
	"""

	 return internal_dpg.toggle_viewport_fullscreen(**kwargs)
def top_container_stack(**kwargs) -> Union[int, str]:
""" Returns the item on the top of the container stack.
Args:
Returns:
Union[int, str]
"""
return internal_dpg.top_container_stack(**kwargs)
def unhighlight_table_cell(table : Union[int, str], row : int, column : int, **kwargs) -> None:
""" Unhighlight specified table cell.
Args:
table (Union[int, str]):
row (int):
column (int):
Returns:
None
"""
return internal_dpg.unhighlight_table_cell(table, row, column, **kwargs)
def unhighlight_table_column(table : Union[int, str], column : int, **kwargs) -> None:
""" Unhighlight specified table column.
Args:
table (Union[int, str]):
column (int):
Returns:
None
"""
return internal_dpg.unhighlight_table_column(table, column, **kwargs)
def unhighlight_table_row(table : Union[int, str], row : int, **kwargs) -> None:
""" Unhighlight specified table row.
Args:
table (Union[int, str]):
row (int):
Returns:
None
"""
return internal_dpg.unhighlight_table_row(table, row, **kwargs)
def unlock_mutex(**kwargs) -> None:
""" Unlocks render thread mutex
Args:
Returns:
None
"""
return internal_dpg.unlock_mutex(**kwargs)
def unset_table_row_color(table : Union[int, str], row : int, **kwargs) -> None:
""" Remove user set table row color.
Args:
table (Union[int, str]):
row (int):
Returns:
None
"""
return internal_dpg.unset_table_row_color(table, row, **kwargs)
def unstage(item: Union[int, str], **kwargs) -> None:
    """ Unstages a previously staged item.

    Args:
        item (Union[int, str]): tag of the item to unstage
    Returns:
        None
    """

    # Thin pass-through to the compiled extension module.
    return internal_dpg.unstage(item, **kwargs)
##########################################################
# Constants                                              #
# (aliases re-exported from the compiled internal_dpg    #
#  extension module; generated, do not edit by hand)     #
##########################################################
mvGraphicsBackend_D3D11=internal_dpg.mvGraphicsBackend_D3D11
mvGraphicsBackend_D3D12=internal_dpg.mvGraphicsBackend_D3D12
mvGraphicsBackend_VULKAN=internal_dpg.mvGraphicsBackend_VULKAN
mvGraphicsBackend_METAL=internal_dpg.mvGraphicsBackend_METAL
mvGraphicsBackend_OPENGL=internal_dpg.mvGraphicsBackend_OPENGL
mvMouseButton_Left=internal_dpg.mvMouseButton_Left
mvMouseButton_Right=internal_dpg.mvMouseButton_Right
mvMouseButton_Middle=internal_dpg.mvMouseButton_Middle
mvMouseButton_X1=internal_dpg.mvMouseButton_X1
mvMouseButton_X2=internal_dpg.mvMouseButton_X2
mvKey_0=internal_dpg.mvKey_0
mvKey_1=internal_dpg.mvKey_1
mvKey_2=internal_dpg.mvKey_2
mvKey_3=internal_dpg.mvKey_3
mvKey_4=internal_dpg.mvKey_4
mvKey_5=internal_dpg.mvKey_5
mvKey_6=internal_dpg.mvKey_6
mvKey_7=internal_dpg.mvKey_7
mvKey_8=internal_dpg.mvKey_8
mvKey_9=internal_dpg.mvKey_9
mvKey_A=internal_dpg.mvKey_A
mvKey_B=internal_dpg.mvKey_B
mvKey_C=internal_dpg.mvKey_C
mvKey_D=internal_dpg.mvKey_D
mvKey_E=internal_dpg.mvKey_E
mvKey_F=internal_dpg.mvKey_F
mvKey_G=internal_dpg.mvKey_G
mvKey_H=internal_dpg.mvKey_H
mvKey_I=internal_dpg.mvKey_I
mvKey_J=internal_dpg.mvKey_J
mvKey_K=internal_dpg.mvKey_K
mvKey_L=internal_dpg.mvKey_L
mvKey_M=internal_dpg.mvKey_M
mvKey_N=internal_dpg.mvKey_N
mvKey_O=internal_dpg.mvKey_O
mvKey_P=internal_dpg.mvKey_P
mvKey_Q=internal_dpg.mvKey_Q
mvKey_R=internal_dpg.mvKey_R
mvKey_S=internal_dpg.mvKey_S
mvKey_T=internal_dpg.mvKey_T
mvKey_U=internal_dpg.mvKey_U
mvKey_V=internal_dpg.mvKey_V
mvKey_W=internal_dpg.mvKey_W
mvKey_X=internal_dpg.mvKey_X
mvKey_Y=internal_dpg.mvKey_Y
mvKey_Z=internal_dpg.mvKey_Z
mvKey_Back=internal_dpg.mvKey_Back
mvKey_Tab=internal_dpg.mvKey_Tab
mvKey_Clear=internal_dpg.mvKey_Clear
mvKey_Return=internal_dpg.mvKey_Return
mvKey_Shift=internal_dpg.mvKey_Shift
mvKey_Control=internal_dpg.mvKey_Control
mvKey_Alt=internal_dpg.mvKey_Alt
mvKey_Pause=internal_dpg.mvKey_Pause
mvKey_Capital=internal_dpg.mvKey_Capital
mvKey_Escape=internal_dpg.mvKey_Escape
mvKey_Spacebar=internal_dpg.mvKey_Spacebar
mvKey_Prior=internal_dpg.mvKey_Prior
mvKey_Next=internal_dpg.mvKey_Next
mvKey_End=internal_dpg.mvKey_End
mvKey_Home=internal_dpg.mvKey_Home
mvKey_Left=internal_dpg.mvKey_Left
mvKey_Up=internal_dpg.mvKey_Up
mvKey_Right=internal_dpg.mvKey_Right
mvKey_Down=internal_dpg.mvKey_Down
mvKey_Select=internal_dpg.mvKey_Select
mvKey_Print=internal_dpg.mvKey_Print
mvKey_Execute=internal_dpg.mvKey_Execute
mvKey_PrintScreen=internal_dpg.mvKey_PrintScreen
mvKey_Insert=internal_dpg.mvKey_Insert
mvKey_Delete=internal_dpg.mvKey_Delete
mvKey_Help=internal_dpg.mvKey_Help
mvKey_LWin=internal_dpg.mvKey_LWin
mvKey_RWin=internal_dpg.mvKey_RWin
mvKey_Apps=internal_dpg.mvKey_Apps
mvKey_Sleep=internal_dpg.mvKey_Sleep
mvKey_NumPad0=internal_dpg.mvKey_NumPad0
mvKey_NumPad1=internal_dpg.mvKey_NumPad1
mvKey_NumPad2=internal_dpg.mvKey_NumPad2
mvKey_NumPad3=internal_dpg.mvKey_NumPad3
mvKey_NumPad4=internal_dpg.mvKey_NumPad4
mvKey_NumPad5=internal_dpg.mvKey_NumPad5
mvKey_NumPad6=internal_dpg.mvKey_NumPad6
mvKey_NumPad7=internal_dpg.mvKey_NumPad7
mvKey_NumPad8=internal_dpg.mvKey_NumPad8
mvKey_NumPad9=internal_dpg.mvKey_NumPad9
mvKey_Multiply=internal_dpg.mvKey_Multiply
mvKey_Add=internal_dpg.mvKey_Add
mvKey_Separator=internal_dpg.mvKey_Separator
mvKey_Subtract=internal_dpg.mvKey_Subtract
mvKey_Decimal=internal_dpg.mvKey_Decimal
mvKey_Divide=internal_dpg.mvKey_Divide
mvKey_F1=internal_dpg.mvKey_F1
mvKey_F2=internal_dpg.mvKey_F2
mvKey_F3=internal_dpg.mvKey_F3
mvKey_F4=internal_dpg.mvKey_F4
mvKey_F5=internal_dpg.mvKey_F5
mvKey_F6=internal_dpg.mvKey_F6
mvKey_F7=internal_dpg.mvKey_F7
mvKey_F8=internal_dpg.mvKey_F8
mvKey_F9=internal_dpg.mvKey_F9
mvKey_F10=internal_dpg.mvKey_F10
mvKey_F11=internal_dpg.mvKey_F11
mvKey_F12=internal_dpg.mvKey_F12
mvKey_F13=internal_dpg.mvKey_F13
mvKey_F14=internal_dpg.mvKey_F14
mvKey_F15=internal_dpg.mvKey_F15
mvKey_F16=internal_dpg.mvKey_F16
mvKey_F17=internal_dpg.mvKey_F17
mvKey_F18=internal_dpg.mvKey_F18
mvKey_F19=internal_dpg.mvKey_F19
mvKey_F20=internal_dpg.mvKey_F20
mvKey_F21=internal_dpg.mvKey_F21
mvKey_F22=internal_dpg.mvKey_F22
mvKey_F23=internal_dpg.mvKey_F23
mvKey_F24=internal_dpg.mvKey_F24
mvKey_F25=internal_dpg.mvKey_F25
mvKey_NumLock=internal_dpg.mvKey_NumLock
mvKey_ScrollLock=internal_dpg.mvKey_ScrollLock
mvKey_LShift=internal_dpg.mvKey_LShift
mvKey_RShift=internal_dpg.mvKey_RShift
mvKey_LControl=internal_dpg.mvKey_LControl
mvKey_RControl=internal_dpg.mvKey_RControl
mvKey_LMenu=internal_dpg.mvKey_LMenu
mvKey_RMenu=internal_dpg.mvKey_RMenu
mvKey_Browser_Back=internal_dpg.mvKey_Browser_Back
mvKey_Browser_Forward=internal_dpg.mvKey_Browser_Forward
mvKey_Browser_Refresh=internal_dpg.mvKey_Browser_Refresh
mvKey_Browser_Stop=internal_dpg.mvKey_Browser_Stop
mvKey_Browser_Search=internal_dpg.mvKey_Browser_Search
mvKey_Browser_Favorites=internal_dpg.mvKey_Browser_Favorites
mvKey_Browser_Home=internal_dpg.mvKey_Browser_Home
mvKey_Volume_Mute=internal_dpg.mvKey_Volume_Mute
mvKey_Volume_Down=internal_dpg.mvKey_Volume_Down
mvKey_Volume_Up=internal_dpg.mvKey_Volume_Up
mvKey_Media_Next_Track=internal_dpg.mvKey_Media_Next_Track
mvKey_Media_Prev_Track=internal_dpg.mvKey_Media_Prev_Track
mvKey_Media_Stop=internal_dpg.mvKey_Media_Stop
mvKey_Media_Play_Pause=internal_dpg.mvKey_Media_Play_Pause
mvKey_Launch_Mail=internal_dpg.mvKey_Launch_Mail
mvKey_Launch_Media_Select=internal_dpg.mvKey_Launch_Media_Select
mvKey_Launch_App1=internal_dpg.mvKey_Launch_App1
mvKey_Launch_App2=internal_dpg.mvKey_Launch_App2
mvKey_Colon=internal_dpg.mvKey_Colon
mvKey_Plus=internal_dpg.mvKey_Plus
mvKey_Comma=internal_dpg.mvKey_Comma
mvKey_Minus=internal_dpg.mvKey_Minus
mvKey_Period=internal_dpg.mvKey_Period
mvKey_Slash=internal_dpg.mvKey_Slash
mvKey_Tilde=internal_dpg.mvKey_Tilde
mvKey_Open_Brace=internal_dpg.mvKey_Open_Brace
mvKey_Backslash=internal_dpg.mvKey_Backslash
mvKey_Close_Brace=internal_dpg.mvKey_Close_Brace
mvKey_Quote=internal_dpg.mvKey_Quote
mvAll=internal_dpg.mvAll
mvTool_About=internal_dpg.mvTool_About
mvTool_Debug=internal_dpg.mvTool_Debug
mvTool_Doc=internal_dpg.mvTool_Doc
mvTool_ItemRegistry=internal_dpg.mvTool_ItemRegistry
mvTool_Metrics=internal_dpg.mvTool_Metrics
mvTool_Style=internal_dpg.mvTool_Style
mvTool_Font=internal_dpg.mvTool_Font
mvFontAtlas=internal_dpg.mvFontAtlas
mvAppUUID=internal_dpg.mvAppUUID
mvInvalidUUID=internal_dpg.mvInvalidUUID
mvDir_None=internal_dpg.mvDir_None
mvDir_Left=internal_dpg.mvDir_Left
mvDir_Right=internal_dpg.mvDir_Right
mvDir_Up=internal_dpg.mvDir_Up
mvDir_Down=internal_dpg.mvDir_Down
mvComboHeight_Small=internal_dpg.mvComboHeight_Small
mvComboHeight_Regular=internal_dpg.mvComboHeight_Regular
mvComboHeight_Large=internal_dpg.mvComboHeight_Large
mvComboHeight_Largest=internal_dpg.mvComboHeight_Largest
mvColorEdit_AlphaPreviewNone=internal_dpg.mvColorEdit_AlphaPreviewNone
mvColorEdit_AlphaPreview=internal_dpg.mvColorEdit_AlphaPreview
mvColorEdit_AlphaPreviewHalf=internal_dpg.mvColorEdit_AlphaPreviewHalf
mvColorEdit_uint8=internal_dpg.mvColorEdit_uint8
mvColorEdit_float=internal_dpg.mvColorEdit_float
mvColorEdit_rgb=internal_dpg.mvColorEdit_rgb
mvColorEdit_hsv=internal_dpg.mvColorEdit_hsv
mvColorEdit_hex=internal_dpg.mvColorEdit_hex
mvColorEdit_input_rgb=internal_dpg.mvColorEdit_input_rgb
mvColorEdit_input_hsv=internal_dpg.mvColorEdit_input_hsv
mvPlotColormap_Default=internal_dpg.mvPlotColormap_Default
mvPlotColormap_Deep=internal_dpg.mvPlotColormap_Deep
mvPlotColormap_Dark=internal_dpg.mvPlotColormap_Dark
mvPlotColormap_Pastel=internal_dpg.mvPlotColormap_Pastel
mvPlotColormap_Paired=internal_dpg.mvPlotColormap_Paired
mvPlotColormap_Viridis=internal_dpg.mvPlotColormap_Viridis
mvPlotColormap_Plasma=internal_dpg.mvPlotColormap_Plasma
mvPlotColormap_Hot=internal_dpg.mvPlotColormap_Hot
mvPlotColormap_Cool=internal_dpg.mvPlotColormap_Cool
mvPlotColormap_Pink=internal_dpg.mvPlotColormap_Pink
mvPlotColormap_Jet=internal_dpg.mvPlotColormap_Jet
mvPlotColormap_Twilight=internal_dpg.mvPlotColormap_Twilight
mvPlotColormap_RdBu=internal_dpg.mvPlotColormap_RdBu
mvPlotColormap_BrBG=internal_dpg.mvPlotColormap_BrBG
mvPlotColormap_PiYG=internal_dpg.mvPlotColormap_PiYG
mvPlotColormap_Spectral=internal_dpg.mvPlotColormap_Spectral
mvPlotColormap_Greys=internal_dpg.mvPlotColormap_Greys
mvColorPicker_bar=internal_dpg.mvColorPicker_bar
mvColorPicker_wheel=internal_dpg.mvColorPicker_wheel
mvTabOrder_Reorderable=internal_dpg.mvTabOrder_Reorderable
mvTabOrder_Fixed=internal_dpg.mvTabOrder_Fixed
mvTabOrder_Leading=internal_dpg.mvTabOrder_Leading
mvTabOrder_Trailing=internal_dpg.mvTabOrder_Trailing
mvDatePickerLevel_Day=internal_dpg.mvDatePickerLevel_Day
mvDatePickerLevel_Month=internal_dpg.mvDatePickerLevel_Month
mvDatePickerLevel_Year=internal_dpg.mvDatePickerLevel_Year
mvCullMode_None=internal_dpg.mvCullMode_None
mvCullMode_Back=internal_dpg.mvCullMode_Back
mvCullMode_Front=internal_dpg.mvCullMode_Front
mvFontRangeHint_Default=internal_dpg.mvFontRangeHint_Default
mvFontRangeHint_Japanese=internal_dpg.mvFontRangeHint_Japanese
mvFontRangeHint_Korean=internal_dpg.mvFontRangeHint_Korean
mvFontRangeHint_Chinese_Full=internal_dpg.mvFontRangeHint_Chinese_Full
mvFontRangeHint_Chinese_Simplified_Common=internal_dpg.mvFontRangeHint_Chinese_Simplified_Common
mvFontRangeHint_Cyrillic=internal_dpg.mvFontRangeHint_Cyrillic
mvFontRangeHint_Thai=internal_dpg.mvFontRangeHint_Thai
mvFontRangeHint_Vietnamese=internal_dpg.mvFontRangeHint_Vietnamese
mvNode_PinShape_Circle=internal_dpg.mvNode_PinShape_Circle
mvNode_PinShape_CircleFilled=internal_dpg.mvNode_PinShape_CircleFilled
mvNode_PinShape_Triangle=internal_dpg.mvNode_PinShape_Triangle
mvNode_PinShape_TriangleFilled=internal_dpg.mvNode_PinShape_TriangleFilled
mvNode_PinShape_Quad=internal_dpg.mvNode_PinShape_Quad
mvNode_PinShape_QuadFilled=internal_dpg.mvNode_PinShape_QuadFilled
mvNode_Attr_Input=internal_dpg.mvNode_Attr_Input
mvNode_Attr_Output=internal_dpg.mvNode_Attr_Output
mvNode_Attr_Static=internal_dpg.mvNode_Attr_Static
mvPlotBin_Sqrt=internal_dpg.mvPlotBin_Sqrt
mvPlotBin_Sturges=internal_dpg.mvPlotBin_Sturges
mvPlotBin_Rice=internal_dpg.mvPlotBin_Rice
mvPlotBin_Scott=internal_dpg.mvPlotBin_Scott
mvXAxis=internal_dpg.mvXAxis
mvYAxis=internal_dpg.mvYAxis
mvPlotMarker_None=internal_dpg.mvPlotMarker_None
mvPlotMarker_Circle=internal_dpg.mvPlotMarker_Circle
mvPlotMarker_Square=internal_dpg.mvPlotMarker_Square
mvPlotMarker_Diamond=internal_dpg.mvPlotMarker_Diamond
mvPlotMarker_Up=internal_dpg.mvPlotMarker_Up
mvPlotMarker_Down=internal_dpg.mvPlotMarker_Down
mvPlotMarker_Left=internal_dpg.mvPlotMarker_Left
mvPlotMarker_Right=internal_dpg.mvPlotMarker_Right
mvPlotMarker_Cross=internal_dpg.mvPlotMarker_Cross
mvPlotMarker_Plus=internal_dpg.mvPlotMarker_Plus
mvPlotMarker_Asterisk=internal_dpg.mvPlotMarker_Asterisk
mvPlot_Location_Center=internal_dpg.mvPlot_Location_Center
mvPlot_Location_North=internal_dpg.mvPlot_Location_North
mvPlot_Location_South=internal_dpg.mvPlot_Location_South
mvPlot_Location_West=internal_dpg.mvPlot_Location_West
mvPlot_Location_East=internal_dpg.mvPlot_Location_East
mvPlot_Location_NorthWest=internal_dpg.mvPlot_Location_NorthWest
mvPlot_Location_NorthEast=internal_dpg.mvPlot_Location_NorthEast
mvPlot_Location_SouthWest=internal_dpg.mvPlot_Location_SouthWest
mvPlot_Location_SouthEast=internal_dpg.mvPlot_Location_SouthEast
mvTable_SizingFixedFit=internal_dpg.mvTable_SizingFixedFit
mvTable_SizingFixedSame=internal_dpg.mvTable_SizingFixedSame
mvTable_SizingStretchProp=internal_dpg.mvTable_SizingStretchProp
mvTable_SizingStretchSame=internal_dpg.mvTable_SizingStretchSame
mvFormat_Float_rgba=internal_dpg.mvFormat_Float_rgba
mvFormat_Float_rgb=internal_dpg.mvFormat_Float_rgb
mvThemeCat_Core=internal_dpg.mvThemeCat_Core
mvThemeCat_Plots=internal_dpg.mvThemeCat_Plots
mvThemeCat_Nodes=internal_dpg.mvThemeCat_Nodes
mvThemeCol_Text=internal_dpg.mvThemeCol_Text
mvThemeCol_TextDisabled=internal_dpg.mvThemeCol_TextDisabled
mvThemeCol_WindowBg=internal_dpg.mvThemeCol_WindowBg
mvThemeCol_ChildBg=internal_dpg.mvThemeCol_ChildBg
mvThemeCol_Border=internal_dpg.mvThemeCol_Border
mvThemeCol_PopupBg=internal_dpg.mvThemeCol_PopupBg
mvThemeCol_BorderShadow=internal_dpg.mvThemeCol_BorderShadow
mvThemeCol_FrameBg=internal_dpg.mvThemeCol_FrameBg
mvThemeCol_FrameBgHovered=internal_dpg.mvThemeCol_FrameBgHovered
mvThemeCol_FrameBgActive=internal_dpg.mvThemeCol_FrameBgActive
mvThemeCol_TitleBg=internal_dpg.mvThemeCol_TitleBg
mvThemeCol_TitleBgActive=internal_dpg.mvThemeCol_TitleBgActive
mvThemeCol_TitleBgCollapsed=internal_dpg.mvThemeCol_TitleBgCollapsed
mvThemeCol_MenuBarBg=internal_dpg.mvThemeCol_MenuBarBg
mvThemeCol_ScrollbarBg=internal_dpg.mvThemeCol_ScrollbarBg
mvThemeCol_ScrollbarGrab=internal_dpg.mvThemeCol_ScrollbarGrab
mvThemeCol_ScrollbarGrabHovered=internal_dpg.mvThemeCol_ScrollbarGrabHovered
mvThemeCol_ScrollbarGrabActive=internal_dpg.mvThemeCol_ScrollbarGrabActive
mvThemeCol_CheckMark=internal_dpg.mvThemeCol_CheckMark
mvThemeCol_SliderGrab=internal_dpg.mvThemeCol_SliderGrab
mvThemeCol_SliderGrabActive=internal_dpg.mvThemeCol_SliderGrabActive
mvThemeCol_Button=internal_dpg.mvThemeCol_Button
mvThemeCol_ButtonHovered=internal_dpg.mvThemeCol_ButtonHovered
mvThemeCol_ButtonActive=internal_dpg.mvThemeCol_ButtonActive
mvThemeCol_Header=internal_dpg.mvThemeCol_Header
mvThemeCol_HeaderHovered=internal_dpg.mvThemeCol_HeaderHovered
mvThemeCol_HeaderActive=internal_dpg.mvThemeCol_HeaderActive
mvThemeCol_Separator=internal_dpg.mvThemeCol_Separator
mvThemeCol_SeparatorHovered=internal_dpg.mvThemeCol_SeparatorHovered
mvThemeCol_SeparatorActive=internal_dpg.mvThemeCol_SeparatorActive
mvThemeCol_ResizeGrip=internal_dpg.mvThemeCol_ResizeGrip
mvThemeCol_ResizeGripHovered=internal_dpg.mvThemeCol_ResizeGripHovered
mvThemeCol_ResizeGripActive=internal_dpg.mvThemeCol_ResizeGripActive
mvThemeCol_Tab=internal_dpg.mvThemeCol_Tab
mvThemeCol_TabHovered=internal_dpg.mvThemeCol_TabHovered
mvThemeCol_TabActive=internal_dpg.mvThemeCol_TabActive
mvThemeCol_TabUnfocused=internal_dpg.mvThemeCol_TabUnfocused
mvThemeCol_TabUnfocusedActive=internal_dpg.mvThemeCol_TabUnfocusedActive
mvThemeCol_DockingPreview=internal_dpg.mvThemeCol_DockingPreview
mvThemeCol_DockingEmptyBg=internal_dpg.mvThemeCol_DockingEmptyBg
mvThemeCol_PlotLines=internal_dpg.mvThemeCol_PlotLines
mvThemeCol_PlotLinesHovered=internal_dpg.mvThemeCol_PlotLinesHovered
mvThemeCol_PlotHistogram=internal_dpg.mvThemeCol_PlotHistogram
mvThemeCol_PlotHistogramHovered=internal_dpg.mvThemeCol_PlotHistogramHovered
mvThemeCol_TableHeaderBg=internal_dpg.mvThemeCol_TableHeaderBg
mvThemeCol_TableBorderStrong=internal_dpg.mvThemeCol_TableBorderStrong
mvThemeCol_TableBorderLight=internal_dpg.mvThemeCol_TableBorderLight
mvThemeCol_TableRowBg=internal_dpg.mvThemeCol_TableRowBg
mvThemeCol_TableRowBgAlt=internal_dpg.mvThemeCol_TableRowBgAlt
mvThemeCol_TextSelectedBg=internal_dpg.mvThemeCol_TextSelectedBg
mvThemeCol_DragDropTarget=internal_dpg.mvThemeCol_DragDropTarget
mvThemeCol_NavHighlight=internal_dpg.mvThemeCol_NavHighlight
mvThemeCol_NavWindowingHighlight=internal_dpg.mvThemeCol_NavWindowingHighlight
mvThemeCol_NavWindowingDimBg=internal_dpg.mvThemeCol_NavWindowingDimBg
mvThemeCol_ModalWindowDimBg=internal_dpg.mvThemeCol_ModalWindowDimBg
mvPlotCol_Line=internal_dpg.mvPlotCol_Line
mvPlotCol_Fill=internal_dpg.mvPlotCol_Fill
mvPlotCol_MarkerOutline=internal_dpg.mvPlotCol_MarkerOutline
mvPlotCol_MarkerFill=internal_dpg.mvPlotCol_MarkerFill
mvPlotCol_ErrorBar=internal_dpg.mvPlotCol_ErrorBar
mvPlotCol_FrameBg=internal_dpg.mvPlotCol_FrameBg
mvPlotCol_PlotBg=internal_dpg.mvPlotCol_PlotBg
mvPlotCol_PlotBorder=internal_dpg.mvPlotCol_PlotBorder
mvPlotCol_LegendBg=internal_dpg.mvPlotCol_LegendBg
mvPlotCol_LegendBorder=internal_dpg.mvPlotCol_LegendBorder
mvPlotCol_LegendText=internal_dpg.mvPlotCol_LegendText
mvPlotCol_TitleText=internal_dpg.mvPlotCol_TitleText
mvPlotCol_InlayText=internal_dpg.mvPlotCol_InlayText
mvPlotCol_XAxis=internal_dpg.mvPlotCol_XAxis
mvPlotCol_XAxisGrid=internal_dpg.mvPlotCol_XAxisGrid
mvPlotCol_YAxis=internal_dpg.mvPlotCol_YAxis
mvPlotCol_YAxisGrid=internal_dpg.mvPlotCol_YAxisGrid
mvPlotCol_YAxis2=internal_dpg.mvPlotCol_YAxis2
mvPlotCol_YAxisGrid2=internal_dpg.mvPlotCol_YAxisGrid2
mvPlotCol_YAxis3=internal_dpg.mvPlotCol_YAxis3
mvPlotCol_YAxisGrid3=internal_dpg.mvPlotCol_YAxisGrid3
mvPlotCol_Selection=internal_dpg.mvPlotCol_Selection
mvPlotCol_Query=internal_dpg.mvPlotCol_Query
mvPlotCol_Crosshairs=internal_dpg.mvPlotCol_Crosshairs
mvNodeCol_NodeBackground=internal_dpg.mvNodeCol_NodeBackground
mvNodeCol_NodeBackgroundHovered=internal_dpg.mvNodeCol_NodeBackgroundHovered
mvNodeCol_NodeBackgroundSelected=internal_dpg.mvNodeCol_NodeBackgroundSelected
mvNodeCol_NodeOutline=internal_dpg.mvNodeCol_NodeOutline
mvNodeCol_TitleBar=internal_dpg.mvNodeCol_TitleBar
mvNodeCol_TitleBarHovered=internal_dpg.mvNodeCol_TitleBarHovered
mvNodeCol_TitleBarSelected=internal_dpg.mvNodeCol_TitleBarSelected
mvNodeCol_Link=internal_dpg.mvNodeCol_Link
mvNodeCol_LinkHovered=internal_dpg.mvNodeCol_LinkHovered
mvNodeCol_LinkSelected=internal_dpg.mvNodeCol_LinkSelected
mvNodeCol_Pin=internal_dpg.mvNodeCol_Pin
mvNodeCol_PinHovered=internal_dpg.mvNodeCol_PinHovered
mvNodeCol_BoxSelector=internal_dpg.mvNodeCol_BoxSelector
mvNodeCol_BoxSelectorOutline=internal_dpg.mvNodeCol_BoxSelectorOutline
mvNodeCol_GridBackground=internal_dpg.mvNodeCol_GridBackground
mvNodeCol_GridLine=internal_dpg.mvNodeCol_GridLine
mvStyleVar_Alpha=internal_dpg.mvStyleVar_Alpha
mvStyleVar_WindowPadding=internal_dpg.mvStyleVar_WindowPadding
mvStyleVar_WindowRounding=internal_dpg.mvStyleVar_WindowRounding
mvStyleVar_WindowBorderSize=internal_dpg.mvStyleVar_WindowBorderSize
mvStyleVar_WindowMinSize=internal_dpg.mvStyleVar_WindowMinSize
mvStyleVar_WindowTitleAlign=internal_dpg.mvStyleVar_WindowTitleAlign
mvStyleVar_ChildRounding=internal_dpg.mvStyleVar_ChildRounding
mvStyleVar_ChildBorderSize=internal_dpg.mvStyleVar_ChildBorderSize
mvStyleVar_PopupRounding=internal_dpg.mvStyleVar_PopupRounding
mvStyleVar_PopupBorderSize=internal_dpg.mvStyleVar_PopupBorderSize
mvStyleVar_FramePadding=internal_dpg.mvStyleVar_FramePadding
mvStyleVar_FrameRounding=internal_dpg.mvStyleVar_FrameRounding
mvStyleVar_FrameBorderSize=internal_dpg.mvStyleVar_FrameBorderSize
mvStyleVar_ItemSpacing=internal_dpg.mvStyleVar_ItemSpacing
mvStyleVar_ItemInnerSpacing=internal_dpg.mvStyleVar_ItemInnerSpacing
mvStyleVar_IndentSpacing=internal_dpg.mvStyleVar_IndentSpacing
mvStyleVar_CellPadding=internal_dpg.mvStyleVar_CellPadding
mvStyleVar_ScrollbarSize=internal_dpg.mvStyleVar_ScrollbarSize
mvStyleVar_ScrollbarRounding=internal_dpg.mvStyleVar_ScrollbarRounding
mvStyleVar_GrabMinSize=internal_dpg.mvStyleVar_GrabMinSize
mvStyleVar_GrabRounding=internal_dpg.mvStyleVar_GrabRounding
mvStyleVar_TabRounding=internal_dpg.mvStyleVar_TabRounding
mvStyleVar_ButtonTextAlign=internal_dpg.mvStyleVar_ButtonTextAlign
mvStyleVar_SelectableTextAlign=internal_dpg.mvStyleVar_SelectableTextAlign
mvPlotStyleVar_LineWeight=internal_dpg.mvPlotStyleVar_LineWeight
mvPlotStyleVar_Marker=internal_dpg.mvPlotStyleVar_Marker
mvPlotStyleVar_MarkerSize=internal_dpg.mvPlotStyleVar_MarkerSize
mvPlotStyleVar_MarkerWeight=internal_dpg.mvPlotStyleVar_MarkerWeight
mvPlotStyleVar_FillAlpha=internal_dpg.mvPlotStyleVar_FillAlpha
mvPlotStyleVar_ErrorBarSize=internal_dpg.mvPlotStyleVar_ErrorBarSize
mvPlotStyleVar_ErrorBarWeight=internal_dpg.mvPlotStyleVar_ErrorBarWeight
mvPlotStyleVar_DigitalBitHeight=internal_dpg.mvPlotStyleVar_DigitalBitHeight
mvPlotStyleVar_DigitalBitGap=internal_dpg.mvPlotStyleVar_DigitalBitGap
mvPlotStyleVar_PlotBorderSize=internal_dpg.mvPlotStyleVar_PlotBorderSize
mvPlotStyleVar_MinorAlpha=internal_dpg.mvPlotStyleVar_MinorAlpha
mvPlotStyleVar_MajorTickLen=internal_dpg.mvPlotStyleVar_MajorTickLen
mvPlotStyleVar_MinorTickLen=internal_dpg.mvPlotStyleVar_MinorTickLen
mvPlotStyleVar_MajorTickSize=internal_dpg.mvPlotStyleVar_MajorTickSize
mvPlotStyleVar_MinorTickSize=internal_dpg.mvPlotStyleVar_MinorTickSize
mvPlotStyleVar_MajorGridSize=internal_dpg.mvPlotStyleVar_MajorGridSize
mvPlotStyleVar_MinorGridSize=internal_dpg.mvPlotStyleVar_MinorGridSize
mvPlotStyleVar_PlotPadding=internal_dpg.mvPlotStyleVar_PlotPadding
mvPlotStyleVar_LabelPadding=internal_dpg.mvPlotStyleVar_LabelPadding
mvPlotStyleVar_LegendPadding=internal_dpg.mvPlotStyleVar_LegendPadding
mvPlotStyleVar_LegendInnerPadding=internal_dpg.mvPlotStyleVar_LegendInnerPadding
mvPlotStyleVar_LegendSpacing=internal_dpg.mvPlotStyleVar_LegendSpacing
mvPlotStyleVar_MousePosPadding=internal_dpg.mvPlotStyleVar_MousePosPadding
mvPlotStyleVar_AnnotationPadding=internal_dpg.mvPlotStyleVar_AnnotationPadding
mvPlotStyleVar_FitPadding=internal_dpg.mvPlotStyleVar_FitPadding
mvPlotStyleVar_PlotDefaultSize=internal_dpg.mvPlotStyleVar_PlotDefaultSize
mvPlotStyleVar_PlotMinSize=internal_dpg.mvPlotStyleVar_PlotMinSize
mvNodeStyleVar_GridSpacing=internal_dpg.mvNodeStyleVar_GridSpacing
mvNodeStyleVar_NodeCornerRounding=internal_dpg.mvNodeStyleVar_NodeCornerRounding
mvNodeStyleVar_NodePaddingHorizontal=internal_dpg.mvNodeStyleVar_NodePaddingHorizontal
mvNodeStyleVar_NodePaddingVertical=internal_dpg.mvNodeStyleVar_NodePaddingVertical
mvNodeStyleVar_NodeBorderThickness=internal_dpg.mvNodeStyleVar_NodeBorderThickness
mvNodeStyleVar_LinkThickness=internal_dpg.mvNodeStyleVar_LinkThickness
mvNodeStyleVar_LinkLineSegmentsPerLength=internal_dpg.mvNodeStyleVar_LinkLineSegmentsPerLength
mvNodeStyleVar_LinkHoverDistance=internal_dpg.mvNodeStyleVar_LinkHoverDistance
mvNodeStyleVar_PinCircleRadius=internal_dpg.mvNodeStyleVar_PinCircleRadius
mvNodeStyleVar_PinQuadSideLength=internal_dpg.mvNodeStyleVar_PinQuadSideLength
mvNodeStyleVar_PinTriangleSideLength=internal_dpg.mvNodeStyleVar_PinTriangleSideLength
mvNodeStyleVar_PinLineThickness=internal_dpg.mvNodeStyleVar_PinLineThickness
mvNodeStyleVar_PinHoverRadius=internal_dpg.mvNodeStyleVar_PinHoverRadius
mvNodeStyleVar_PinOffset=internal_dpg.mvNodeStyleVar_PinOffset
mvInputText=internal_dpg.mvInputText
mvButton=internal_dpg.mvButton
mvRadioButton=internal_dpg.mvRadioButton
mvTabBar=internal_dpg.mvTabBar
mvTab=internal_dpg.mvTab
mvImage=internal_dpg.mvImage
mvMenuBar=internal_dpg.mvMenuBar
mvViewportMenuBar=internal_dpg.mvViewportMenuBar
mvMenu=internal_dpg.mvMenu
mvMenuItem=internal_dpg.mvMenuItem
mvChildWindow=internal_dpg.mvChildWindow
mvGroup=internal_dpg.mvGroup
mvSliderFloat=internal_dpg.mvSliderFloat
mvSliderInt=internal_dpg.mvSliderInt
mvFilterSet=internal_dpg.mvFilterSet
mvDragFloat=internal_dpg.mvDragFloat
mvDragInt=internal_dpg.mvDragInt
mvInputFloat=internal_dpg.mvInputFloat
mvInputInt=internal_dpg.mvInputInt
mvColorEdit=internal_dpg.mvColorEdit
mvClipper=internal_dpg.mvClipper
mvColorPicker=internal_dpg.mvColorPicker
mvTooltip=internal_dpg.mvTooltip
mvCollapsingHeader=internal_dpg.mvCollapsingHeader
mvSeparator=internal_dpg.mvSeparator
mvCheckbox=internal_dpg.mvCheckbox
mvListbox=internal_dpg.mvListbox
mvText=internal_dpg.mvText
mvCombo=internal_dpg.mvCombo
mvPlot=internal_dpg.mvPlot
mvSimplePlot=internal_dpg.mvSimplePlot
mvDrawlist=internal_dpg.mvDrawlist
mvWindowAppItem=internal_dpg.mvWindowAppItem
mvSelectable=internal_dpg.mvSelectable
mvTreeNode=internal_dpg.mvTreeNode
mvProgressBar=internal_dpg.mvProgressBar
mvSpacer=internal_dpg.mvSpacer
mvImageButton=internal_dpg.mvImageButton
mvTimePicker=internal_dpg.mvTimePicker
mvDatePicker=internal_dpg.mvDatePicker
mvColorButton=internal_dpg.mvColorButton
mvFileDialog=internal_dpg.mvFileDialog
mvTabButton=internal_dpg.mvTabButton
mvDrawNode=internal_dpg.mvDrawNode
mvNodeEditor=internal_dpg.mvNodeEditor
mvNode=internal_dpg.mvNode
mvNodeAttribute=internal_dpg.mvNodeAttribute
mvTable=internal_dpg.mvTable
mvTableColumn=internal_dpg.mvTableColumn
mvTableRow=internal_dpg.mvTableRow
mvDrawLine=internal_dpg.mvDrawLine
mvDrawArrow=internal_dpg.mvDrawArrow
mvDrawTriangle=internal_dpg.mvDrawTriangle
mvDrawImageQuad=internal_dpg.mvDrawImageQuad
mvDrawCircle=internal_dpg.mvDrawCircle
mvDrawEllipse=internal_dpg.mvDrawEllipse
mvDrawBezierCubic=internal_dpg.mvDrawBezierCubic
mvDrawBezierQuadratic=internal_dpg.mvDrawBezierQuadratic
mvDrawQuad=internal_dpg.mvDrawQuad
mvDrawRect=internal_dpg.mvDrawRect
mvDrawText=internal_dpg.mvDrawText
mvDrawPolygon=internal_dpg.mvDrawPolygon
mvDrawPolyline=internal_dpg.mvDrawPolyline
mvDrawImage=internal_dpg.mvDrawImage
mvDragFloatMulti=internal_dpg.mvDragFloatMulti
mvDragIntMulti=internal_dpg.mvDragIntMulti
mvSliderFloatMulti=internal_dpg.mvSliderFloatMulti
mvSliderIntMulti=internal_dpg.mvSliderIntMulti
mvInputIntMulti=internal_dpg.mvInputIntMulti
mvInputFloatMulti=internal_dpg.mvInputFloatMulti
mvDragPoint=internal_dpg.mvDragPoint
mvDragLine=internal_dpg.mvDragLine
mvAnnotation=internal_dpg.mvAnnotation
mvLineSeries=internal_dpg.mvLineSeries
mvScatterSeries=internal_dpg.mvScatterSeries
mvStemSeries=internal_dpg.mvStemSeries
mvStairSeries=internal_dpg.mvStairSeries
mvBarSeries=internal_dpg.mvBarSeries
mvErrorSeries=internal_dpg.mvErrorSeries
mvVLineSeries=internal_dpg.mvVLineSeries
mvHLineSeries=internal_dpg.mvHLineSeries
mvHeatSeries=internal_dpg.mvHeatSeries
mvImageSeries=internal_dpg.mvImageSeries
mvPieSeries=internal_dpg.mvPieSeries
mvShadeSeries=internal_dpg.mvShadeSeries
mvLabelSeries=internal_dpg.mvLabelSeries
mvHistogramSeries=internal_dpg.mvHistogramSeries
mv2dHistogramSeries=internal_dpg.mv2dHistogramSeries
mvCandleSeries=internal_dpg.mvCandleSeries
mvAreaSeries=internal_dpg.mvAreaSeries
mvColorMapScale=internal_dpg.mvColorMapScale
mvSlider3D=internal_dpg.mvSlider3D
mvKnobFloat=internal_dpg.mvKnobFloat
mvLoadingIndicator=internal_dpg.mvLoadingIndicator
mvNodeLink=internal_dpg.mvNodeLink
mvTextureRegistry=internal_dpg.mvTextureRegistry
mvStaticTexture=internal_dpg.mvStaticTexture
mvDynamicTexture=internal_dpg.mvDynamicTexture
mvStage=internal_dpg.mvStage
mvDrawLayer=internal_dpg.mvDrawLayer
mvViewportDrawlist=internal_dpg.mvViewportDrawlist
mvFileExtension=internal_dpg.mvFileExtension
mvPlotLegend=internal_dpg.mvPlotLegend
mvPlotAxis=internal_dpg.mvPlotAxis
mvHandlerRegistry=internal_dpg.mvHandlerRegistry
mvKeyDownHandler=internal_dpg.mvKeyDownHandler
mvKeyPressHandler=internal_dpg.mvKeyPressHandler
mvKeyReleaseHandler=internal_dpg.mvKeyReleaseHandler
mvMouseMoveHandler=internal_dpg.mvMouseMoveHandler
mvMouseWheelHandler=internal_dpg.mvMouseWheelHandler
mvMouseClickHandler=internal_dpg.mvMouseClickHandler
mvMouseDoubleClickHandler=internal_dpg.mvMouseDoubleClickHandler
mvMouseDownHandler=internal_dpg.mvMouseDownHandler
mvMouseReleaseHandler=internal_dpg.mvMouseReleaseHandler
mvMouseDragHandler=internal_dpg.mvMouseDragHandler
mvHoverHandler=internal_dpg.mvHoverHandler
mvActiveHandler=internal_dpg.mvActiveHandler
mvFocusHandler=internal_dpg.mvFocusHandler
mvVisibleHandler=internal_dpg.mvVisibleHandler
mvEditedHandler=internal_dpg.mvEditedHandler
mvActivatedHandler=internal_dpg.mvActivatedHandler
mvDeactivatedHandler=internal_dpg.mvDeactivatedHandler
mvDeactivatedAfterEditHandler=internal_dpg.mvDeactivatedAfterEditHandler
mvToggledOpenHandler=internal_dpg.mvToggledOpenHandler
mvClickedHandler=internal_dpg.mvClickedHandler
mvDragPayload=internal_dpg.mvDragPayload
mvResizeHandler=internal_dpg.mvResizeHandler
mvFont=internal_dpg.mvFont
mvFontRegistry=internal_dpg.mvFontRegistry
mvTheme=internal_dpg.mvTheme
mvThemeColor=internal_dpg.mvThemeColor
mvThemeStyle=internal_dpg.mvThemeStyle
mvThemeComponent=internal_dpg.mvThemeComponent
mvFontRangeHint=internal_dpg.mvFontRangeHint
mvFontRange=internal_dpg.mvFontRange
mvFontChars=internal_dpg.mvFontChars
mvCharRemap=internal_dpg.mvCharRemap
mvValueRegistry=internal_dpg.mvValueRegistry
mvIntValue=internal_dpg.mvIntValue
mvFloatValue=internal_dpg.mvFloatValue
mvFloat4Value=internal_dpg.mvFloat4Value
mvInt4Value=internal_dpg.mvInt4Value
mvBoolValue=internal_dpg.mvBoolValue
mvStringValue=internal_dpg.mvStringValue
mvDoubleValue=internal_dpg.mvDoubleValue
mvDouble4Value=internal_dpg.mvDouble4Value
mvColorValue=internal_dpg.mvColorValue
mvFloatVectValue=internal_dpg.mvFloatVectValue
mvSeriesValue=internal_dpg.mvSeriesValue
mvRawTexture=internal_dpg.mvRawTexture
mvSubPlots=internal_dpg.mvSubPlots
mvColorMap=internal_dpg.mvColorMap
mvColorMapRegistry=internal_dpg.mvColorMapRegistry
mvColorMapButton=internal_dpg.mvColorMapButton
mvColorMapSlider=internal_dpg.mvColorMapSlider
mvTemplateRegistry=internal_dpg.mvTemplateRegistry
mvTableCell=internal_dpg.mvTableCell
mvItemHandlerRegistry=internal_dpg.mvItemHandlerRegistry
mvReservedUUID_0=internal_dpg.mvReservedUUID_0
mvReservedUUID_1=internal_dpg.mvReservedUUID_1
mvReservedUUID_2=internal_dpg.mvReservedUUID_2
mvReservedUUID_3=internal_dpg.mvReservedUUID_3
mvReservedUUID_4=internal_dpg.mvReservedUUID_4
mvReservedUUID_5=internal_dpg.mvReservedUUID_5
mvReservedUUID_6=internal_dpg.mvReservedUUID_6
mvReservedUUID_7=internal_dpg.mvReservedUUID_7
mvReservedUUID_8=internal_dpg.mvReservedUUID_8
mvReservedUUID_9=internal_dpg.mvReservedUUID_9
| 56.135813
| 1,319
| 0.737808
|
bca34c403a4539cc61e0e270d4d3befc8c32dddf
| 29,490
|
py
|
Python
|
Lib/site-packages/tensorflow_core/contrib/boosted_trees/proto/learner_pb2.py
|
shivammaniharsahu/django_api
|
6ffb3d9f70f30f5fd3ae06ec00a6dd7c7783a797
|
[
"bzip2-1.0.6"
] | 2
|
2019-08-04T20:28:14.000Z
|
2019-10-27T23:26:42.000Z
|
Lib/site-packages/tensorflow_core/contrib/boosted_trees/proto/learner_pb2.py
|
shivammaniharsahu/django_api
|
6ffb3d9f70f30f5fd3ae06ec00a6dd7c7783a797
|
[
"bzip2-1.0.6"
] | null | null | null |
Lib/site-packages/tensorflow_core/contrib/boosted_trees/proto/learner_pb2.py
|
shivammaniharsahu/django_api
|
6ffb3d9f70f30f5fd3ae06ec00a6dd7c7783a797
|
[
"bzip2-1.0.6"
] | 1
|
2020-11-04T03:16:29.000Z
|
2020-11-04T03:16:29.000Z
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorflow/contrib/boosted_trees/proto/learner.proto
import sys
# Helper used to embed the serialized descriptor as bytes: on Python 2 a
# str literal is already bytes (identity), on Python 3 it must be
# latin-1 encoded to recover the raw byte values.
if sys.version_info[0] < 3:
    _b = lambda x: x
else:
    _b = lambda x: x.encode('latin1')
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='tensorflow/contrib/boosted_trees/proto/learner.proto',
package='tensorflow.boosted_trees.learner',
syntax='proto3',
serialized_options=_b('\370\001\001'),
serialized_pb=_b('\n4tensorflow/contrib/boosted_trees/proto/learner.proto\x12 tensorflow.boosted_trees.learner\"K\n\x18TreeRegularizationConfig\x12\n\n\x02l1\x18\x01 \x01(\x02\x12\n\n\x02l2\x18\x02 \x01(\x02\x12\x17\n\x0ftree_complexity\x18\x03 \x01(\x02\"v\n\x15TreeConstraintsConfig\x12\x16\n\x0emax_tree_depth\x18\x01 \x01(\r\x12\x17\n\x0fmin_node_weight\x18\x02 \x01(\x02\x12,\n$max_number_of_unique_feature_columns\x18\x03 \x01(\x03\"\x96\x02\n\x12LearningRateConfig\x12J\n\x05\x66ixed\x18\x01 \x01(\x0b\x32\x39.tensorflow.boosted_trees.learner.LearningRateFixedConfigH\x00\x12T\n\x07\x64ropout\x18\x02 \x01(\x0b\x32\x41.tensorflow.boosted_trees.learner.LearningRateDropoutDrivenConfigH\x00\x12U\n\x0bline_search\x18\x03 \x01(\x0b\x32>.tensorflow.boosted_trees.learner.LearningRateLineSearchConfigH\x00\x42\x07\n\x05tuner\"0\n\x17LearningRateFixedConfig\x12\x15\n\rlearning_rate\x18\x01 \x01(\x02\"L\n\x1cLearningRateLineSearchConfig\x12\x19\n\x11max_learning_rate\x18\x01 \x01(\x02\x12\x11\n\tnum_steps\x18\x02 \x01(\x05\"a\n\x0f\x41veragingConfig\x12\x1e\n\x14\x61verage_last_n_trees\x18\x01 \x01(\x02H\x00\x12$\n\x1a\x61verage_last_percent_trees\x18\x02 \x01(\x02H\x00\x42\x08\n\x06\x63onfig\"~\n\x1fLearningRateDropoutDrivenConfig\x12\x1b\n\x13\x64ropout_probability\x18\x01 \x01(\x02\x12\'\n\x1fprobability_of_skipping_dropout\x18\x02 \x01(\x02\x12\x15\n\rlearning_rate\x18\x03 \x01(\x02\"\x88\t\n\rLearnerConfig\x12\x13\n\x0bnum_classes\x18\x01 \x01(\r\x12#\n\x19\x66\x65\x61ture_fraction_per_tree\x18\x02 \x01(\x02H\x00\x12$\n\x1a\x66\x65\x61ture_fraction_per_level\x18\x03 \x01(\x02H\x00\x12R\n\x0eregularization\x18\x04 \x01(\x0b\x32:.tensorflow.boosted_trees.learner.TreeRegularizationConfig\x12L\n\x0b\x63onstraints\x18\x05 \x01(\x0b\x32\x37.tensorflow.boosted_trees.learner.TreeConstraintsConfig\x12Q\n\x0cpruning_mode\x18\x08 \x01(\x0e\x32;.tensorflow.boosted_trees.learner.LearnerConfig.PruningMode\x12Q\n\x0cgrowing_mode\x18\t 
\x01(\x0e\x32;.tensorflow.boosted_trees.learner.LearnerConfig.GrowingMode\x12Q\n\x13learning_rate_tuner\x18\x06 \x01(\x0b\x32\x34.tensorflow.boosted_trees.learner.LearningRateConfig\x12`\n\x14multi_class_strategy\x18\n \x01(\x0e\x32\x42.tensorflow.boosted_trees.learner.LearnerConfig.MultiClassStrategy\x12K\n\x10\x61veraging_config\x18\x0b \x01(\x0b\x32\x31.tensorflow.boosted_trees.learner.AveragingConfig\x12Z\n\x11weak_learner_type\x18\x0c \x01(\x0e\x32?.tensorflow.boosted_trees.learner.LearnerConfig.WeakLearnerType\"J\n\x0bPruningMode\x12\x1c\n\x18PRUNING_MODE_UNSPECIFIED\x10\x00\x12\r\n\tPRE_PRUNE\x10\x01\x12\x0e\n\nPOST_PRUNE\x10\x02\"O\n\x0bGrowingMode\x12\x1c\n\x18GROWING_MODE_UNSPECIFIED\x10\x00\x12\x0e\n\nWHOLE_TREE\x10\x01\x12\x12\n\x0eLAYER_BY_LAYER\x10\x02\"v\n\x12MultiClassStrategy\x12$\n MULTI_CLASS_STRATEGY_UNSPECIFIED\x10\x00\x12\x12\n\x0eTREE_PER_CLASS\x10\x01\x12\x10\n\x0c\x46ULL_HESSIAN\x10\x02\x12\x14\n\x10\x44IAGONAL_HESSIAN\x10\x03\"H\n\x0fWeakLearnerType\x12\x18\n\x14NORMAL_DECISION_TREE\x10\x00\x12\x1b\n\x17OBLIVIOUS_DECISION_TREE\x10\x01\x42\x12\n\x10\x66\x65\x61ture_fractionB\x03\xf8\x01\x01\x62\x06proto3')
)
_LEARNERCONFIG_PRUNINGMODE = _descriptor.EnumDescriptor(
name='PruningMode',
full_name='tensorflow.boosted_trees.learner.LearnerConfig.PruningMode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='PRUNING_MODE_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PRE_PRUNE', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='POST_PRUNE', index=2, number=2,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=1715,
serialized_end=1789,
)
_sym_db.RegisterEnumDescriptor(_LEARNERCONFIG_PRUNINGMODE)
_LEARNERCONFIG_GROWINGMODE = _descriptor.EnumDescriptor(
name='GrowingMode',
full_name='tensorflow.boosted_trees.learner.LearnerConfig.GrowingMode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='GROWING_MODE_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='WHOLE_TREE', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LAYER_BY_LAYER', index=2, number=2,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=1791,
serialized_end=1870,
)
_sym_db.RegisterEnumDescriptor(_LEARNERCONFIG_GROWINGMODE)
_LEARNERCONFIG_MULTICLASSSTRATEGY = _descriptor.EnumDescriptor(
name='MultiClassStrategy',
full_name='tensorflow.boosted_trees.learner.LearnerConfig.MultiClassStrategy',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='MULTI_CLASS_STRATEGY_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='TREE_PER_CLASS', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FULL_HESSIAN', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DIAGONAL_HESSIAN', index=3, number=3,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=1872,
serialized_end=1990,
)
_sym_db.RegisterEnumDescriptor(_LEARNERCONFIG_MULTICLASSSTRATEGY)
_LEARNERCONFIG_WEAKLEARNERTYPE = _descriptor.EnumDescriptor(
name='WeakLearnerType',
full_name='tensorflow.boosted_trees.learner.LearnerConfig.WeakLearnerType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='NORMAL_DECISION_TREE', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='OBLIVIOUS_DECISION_TREE', index=1, number=1,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=1992,
serialized_end=2064,
)
_sym_db.RegisterEnumDescriptor(_LEARNERCONFIG_WEAKLEARNERTYPE)
_TREEREGULARIZATIONCONFIG = _descriptor.Descriptor(
name='TreeRegularizationConfig',
full_name='tensorflow.boosted_trees.learner.TreeRegularizationConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='l1', full_name='tensorflow.boosted_trees.learner.TreeRegularizationConfig.l1', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='l2', full_name='tensorflow.boosted_trees.learner.TreeRegularizationConfig.l2', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tree_complexity', full_name='tensorflow.boosted_trees.learner.TreeRegularizationConfig.tree_complexity', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=90,
serialized_end=165,
)
_TREECONSTRAINTSCONFIG = _descriptor.Descriptor(
name='TreeConstraintsConfig',
full_name='tensorflow.boosted_trees.learner.TreeConstraintsConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='max_tree_depth', full_name='tensorflow.boosted_trees.learner.TreeConstraintsConfig.max_tree_depth', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='min_node_weight', full_name='tensorflow.boosted_trees.learner.TreeConstraintsConfig.min_node_weight', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='max_number_of_unique_feature_columns', full_name='tensorflow.boosted_trees.learner.TreeConstraintsConfig.max_number_of_unique_feature_columns', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=167,
serialized_end=285,
)
_LEARNINGRATECONFIG = _descriptor.Descriptor(
name='LearningRateConfig',
full_name='tensorflow.boosted_trees.learner.LearningRateConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='fixed', full_name='tensorflow.boosted_trees.learner.LearningRateConfig.fixed', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dropout', full_name='tensorflow.boosted_trees.learner.LearningRateConfig.dropout', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='line_search', full_name='tensorflow.boosted_trees.learner.LearningRateConfig.line_search', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='tuner', full_name='tensorflow.boosted_trees.learner.LearningRateConfig.tuner',
index=0, containing_type=None, fields=[]),
],
serialized_start=288,
serialized_end=566,
)
_LEARNINGRATEFIXEDCONFIG = _descriptor.Descriptor(
name='LearningRateFixedConfig',
full_name='tensorflow.boosted_trees.learner.LearningRateFixedConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='learning_rate', full_name='tensorflow.boosted_trees.learner.LearningRateFixedConfig.learning_rate', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=568,
serialized_end=616,
)
_LEARNINGRATELINESEARCHCONFIG = _descriptor.Descriptor(
name='LearningRateLineSearchConfig',
full_name='tensorflow.boosted_trees.learner.LearningRateLineSearchConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='max_learning_rate', full_name='tensorflow.boosted_trees.learner.LearningRateLineSearchConfig.max_learning_rate', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='num_steps', full_name='tensorflow.boosted_trees.learner.LearningRateLineSearchConfig.num_steps', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=618,
serialized_end=694,
)
_AVERAGINGCONFIG = _descriptor.Descriptor(
name='AveragingConfig',
full_name='tensorflow.boosted_trees.learner.AveragingConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='average_last_n_trees', full_name='tensorflow.boosted_trees.learner.AveragingConfig.average_last_n_trees', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='average_last_percent_trees', full_name='tensorflow.boosted_trees.learner.AveragingConfig.average_last_percent_trees', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='config', full_name='tensorflow.boosted_trees.learner.AveragingConfig.config',
index=0, containing_type=None, fields=[]),
],
serialized_start=696,
serialized_end=793,
)
_LEARNINGRATEDROPOUTDRIVENCONFIG = _descriptor.Descriptor(
name='LearningRateDropoutDrivenConfig',
full_name='tensorflow.boosted_trees.learner.LearningRateDropoutDrivenConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dropout_probability', full_name='tensorflow.boosted_trees.learner.LearningRateDropoutDrivenConfig.dropout_probability', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='probability_of_skipping_dropout', full_name='tensorflow.boosted_trees.learner.LearningRateDropoutDrivenConfig.probability_of_skipping_dropout', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='learning_rate', full_name='tensorflow.boosted_trees.learner.LearningRateDropoutDrivenConfig.learning_rate', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=795,
serialized_end=921,
)
_LEARNERCONFIG = _descriptor.Descriptor(
name='LearnerConfig',
full_name='tensorflow.boosted_trees.learner.LearnerConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='num_classes', full_name='tensorflow.boosted_trees.learner.LearnerConfig.num_classes', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='feature_fraction_per_tree', full_name='tensorflow.boosted_trees.learner.LearnerConfig.feature_fraction_per_tree', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='feature_fraction_per_level', full_name='tensorflow.boosted_trees.learner.LearnerConfig.feature_fraction_per_level', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='regularization', full_name='tensorflow.boosted_trees.learner.LearnerConfig.regularization', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='constraints', full_name='tensorflow.boosted_trees.learner.LearnerConfig.constraints', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pruning_mode', full_name='tensorflow.boosted_trees.learner.LearnerConfig.pruning_mode', index=5,
number=8, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='growing_mode', full_name='tensorflow.boosted_trees.learner.LearnerConfig.growing_mode', index=6,
number=9, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='learning_rate_tuner', full_name='tensorflow.boosted_trees.learner.LearnerConfig.learning_rate_tuner', index=7,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='multi_class_strategy', full_name='tensorflow.boosted_trees.learner.LearnerConfig.multi_class_strategy', index=8,
number=10, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='averaging_config', full_name='tensorflow.boosted_trees.learner.LearnerConfig.averaging_config', index=9,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='weak_learner_type', full_name='tensorflow.boosted_trees.learner.LearnerConfig.weak_learner_type', index=10,
number=12, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
_LEARNERCONFIG_PRUNINGMODE,
_LEARNERCONFIG_GROWINGMODE,
_LEARNERCONFIG_MULTICLASSSTRATEGY,
_LEARNERCONFIG_WEAKLEARNERTYPE,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='feature_fraction', full_name='tensorflow.boosted_trees.learner.LearnerConfig.feature_fraction',
index=0, containing_type=None, fields=[]),
],
serialized_start=924,
serialized_end=2084,
)
_LEARNINGRATECONFIG.fields_by_name['fixed'].message_type = _LEARNINGRATEFIXEDCONFIG
_LEARNINGRATECONFIG.fields_by_name['dropout'].message_type = _LEARNINGRATEDROPOUTDRIVENCONFIG
_LEARNINGRATECONFIG.fields_by_name['line_search'].message_type = _LEARNINGRATELINESEARCHCONFIG
_LEARNINGRATECONFIG.oneofs_by_name['tuner'].fields.append(
_LEARNINGRATECONFIG.fields_by_name['fixed'])
_LEARNINGRATECONFIG.fields_by_name['fixed'].containing_oneof = _LEARNINGRATECONFIG.oneofs_by_name['tuner']
_LEARNINGRATECONFIG.oneofs_by_name['tuner'].fields.append(
_LEARNINGRATECONFIG.fields_by_name['dropout'])
_LEARNINGRATECONFIG.fields_by_name['dropout'].containing_oneof = _LEARNINGRATECONFIG.oneofs_by_name['tuner']
_LEARNINGRATECONFIG.oneofs_by_name['tuner'].fields.append(
_LEARNINGRATECONFIG.fields_by_name['line_search'])
_LEARNINGRATECONFIG.fields_by_name['line_search'].containing_oneof = _LEARNINGRATECONFIG.oneofs_by_name['tuner']
_AVERAGINGCONFIG.oneofs_by_name['config'].fields.append(
_AVERAGINGCONFIG.fields_by_name['average_last_n_trees'])
_AVERAGINGCONFIG.fields_by_name['average_last_n_trees'].containing_oneof = _AVERAGINGCONFIG.oneofs_by_name['config']
_AVERAGINGCONFIG.oneofs_by_name['config'].fields.append(
_AVERAGINGCONFIG.fields_by_name['average_last_percent_trees'])
_AVERAGINGCONFIG.fields_by_name['average_last_percent_trees'].containing_oneof = _AVERAGINGCONFIG.oneofs_by_name['config']
_LEARNERCONFIG.fields_by_name['regularization'].message_type = _TREEREGULARIZATIONCONFIG
_LEARNERCONFIG.fields_by_name['constraints'].message_type = _TREECONSTRAINTSCONFIG
_LEARNERCONFIG.fields_by_name['pruning_mode'].enum_type = _LEARNERCONFIG_PRUNINGMODE
_LEARNERCONFIG.fields_by_name['growing_mode'].enum_type = _LEARNERCONFIG_GROWINGMODE
_LEARNERCONFIG.fields_by_name['learning_rate_tuner'].message_type = _LEARNINGRATECONFIG
_LEARNERCONFIG.fields_by_name['multi_class_strategy'].enum_type = _LEARNERCONFIG_MULTICLASSSTRATEGY
_LEARNERCONFIG.fields_by_name['averaging_config'].message_type = _AVERAGINGCONFIG
_LEARNERCONFIG.fields_by_name['weak_learner_type'].enum_type = _LEARNERCONFIG_WEAKLEARNERTYPE
_LEARNERCONFIG_PRUNINGMODE.containing_type = _LEARNERCONFIG
_LEARNERCONFIG_GROWINGMODE.containing_type = _LEARNERCONFIG
_LEARNERCONFIG_MULTICLASSSTRATEGY.containing_type = _LEARNERCONFIG
_LEARNERCONFIG_WEAKLEARNERTYPE.containing_type = _LEARNERCONFIG
_LEARNERCONFIG.oneofs_by_name['feature_fraction'].fields.append(
_LEARNERCONFIG.fields_by_name['feature_fraction_per_tree'])
_LEARNERCONFIG.fields_by_name['feature_fraction_per_tree'].containing_oneof = _LEARNERCONFIG.oneofs_by_name['feature_fraction']
_LEARNERCONFIG.oneofs_by_name['feature_fraction'].fields.append(
_LEARNERCONFIG.fields_by_name['feature_fraction_per_level'])
_LEARNERCONFIG.fields_by_name['feature_fraction_per_level'].containing_oneof = _LEARNERCONFIG.oneofs_by_name['feature_fraction']
DESCRIPTOR.message_types_by_name['TreeRegularizationConfig'] = _TREEREGULARIZATIONCONFIG
DESCRIPTOR.message_types_by_name['TreeConstraintsConfig'] = _TREECONSTRAINTSCONFIG
DESCRIPTOR.message_types_by_name['LearningRateConfig'] = _LEARNINGRATECONFIG
DESCRIPTOR.message_types_by_name['LearningRateFixedConfig'] = _LEARNINGRATEFIXEDCONFIG
DESCRIPTOR.message_types_by_name['LearningRateLineSearchConfig'] = _LEARNINGRATELINESEARCHCONFIG
DESCRIPTOR.message_types_by_name['AveragingConfig'] = _AVERAGINGCONFIG
DESCRIPTOR.message_types_by_name['LearningRateDropoutDrivenConfig'] = _LEARNINGRATEDROPOUTDRIVENCONFIG
DESCRIPTOR.message_types_by_name['LearnerConfig'] = _LEARNERCONFIG
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TreeRegularizationConfig = _reflection.GeneratedProtocolMessageType('TreeRegularizationConfig', (_message.Message,), {
'DESCRIPTOR' : _TREEREGULARIZATIONCONFIG,
'__module__' : 'tensorflow.contrib.boosted_trees.proto.learner_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.boosted_trees.learner.TreeRegularizationConfig)
})
_sym_db.RegisterMessage(TreeRegularizationConfig)
TreeConstraintsConfig = _reflection.GeneratedProtocolMessageType('TreeConstraintsConfig', (_message.Message,), {
'DESCRIPTOR' : _TREECONSTRAINTSCONFIG,
'__module__' : 'tensorflow.contrib.boosted_trees.proto.learner_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.boosted_trees.learner.TreeConstraintsConfig)
})
_sym_db.RegisterMessage(TreeConstraintsConfig)
LearningRateConfig = _reflection.GeneratedProtocolMessageType('LearningRateConfig', (_message.Message,), {
'DESCRIPTOR' : _LEARNINGRATECONFIG,
'__module__' : 'tensorflow.contrib.boosted_trees.proto.learner_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.boosted_trees.learner.LearningRateConfig)
})
_sym_db.RegisterMessage(LearningRateConfig)
LearningRateFixedConfig = _reflection.GeneratedProtocolMessageType('LearningRateFixedConfig', (_message.Message,), {
'DESCRIPTOR' : _LEARNINGRATEFIXEDCONFIG,
'__module__' : 'tensorflow.contrib.boosted_trees.proto.learner_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.boosted_trees.learner.LearningRateFixedConfig)
})
_sym_db.RegisterMessage(LearningRateFixedConfig)
LearningRateLineSearchConfig = _reflection.GeneratedProtocolMessageType('LearningRateLineSearchConfig', (_message.Message,), {
'DESCRIPTOR' : _LEARNINGRATELINESEARCHCONFIG,
'__module__' : 'tensorflow.contrib.boosted_trees.proto.learner_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.boosted_trees.learner.LearningRateLineSearchConfig)
})
_sym_db.RegisterMessage(LearningRateLineSearchConfig)
AveragingConfig = _reflection.GeneratedProtocolMessageType('AveragingConfig', (_message.Message,), {
'DESCRIPTOR' : _AVERAGINGCONFIG,
'__module__' : 'tensorflow.contrib.boosted_trees.proto.learner_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.boosted_trees.learner.AveragingConfig)
})
_sym_db.RegisterMessage(AveragingConfig)
LearningRateDropoutDrivenConfig = _reflection.GeneratedProtocolMessageType('LearningRateDropoutDrivenConfig', (_message.Message,), {
'DESCRIPTOR' : _LEARNINGRATEDROPOUTDRIVENCONFIG,
'__module__' : 'tensorflow.contrib.boosted_trees.proto.learner_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.boosted_trees.learner.LearningRateDropoutDrivenConfig)
})
_sym_db.RegisterMessage(LearningRateDropoutDrivenConfig)
LearnerConfig = _reflection.GeneratedProtocolMessageType('LearnerConfig', (_message.Message,), {
'DESCRIPTOR' : _LEARNERCONFIG,
'__module__' : 'tensorflow.contrib.boosted_trees.proto.learner_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.boosted_trees.learner.LearnerConfig)
})
_sym_db.RegisterMessage(LearnerConfig)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 46.222571
| 3,097
| 0.782469
|
7511af5c0a722274b460c3760a807a009cc8b447
| 439
|
py
|
Python
|
tests/dev_controller.py
|
bbockelm/htcondor_jobview
|
ee8e7d9127f0218f99ce61532bc8cdfbf9708f24
|
[
"Apache-2.0"
] | 4
|
2016-11-03T15:57:13.000Z
|
2021-03-08T16:56:08.000Z
|
tests/dev_controller.py
|
bbockelm/htcondor_jobview
|
ee8e7d9127f0218f99ce61532bc8cdfbf9708f24
|
[
"Apache-2.0"
] | null | null | null |
tests/dev_controller.py
|
bbockelm/htcondor_jobview
|
ee8e7d9127f0218f99ce61532bc8cdfbf9708f24
|
[
"Apache-2.0"
] | 1
|
2018-07-17T14:39:49.000Z
|
2018-07-17T14:39:49.000Z
|
#!/usr/bin/python
import os
import sys
if os.path.exists("src"):
sys.path.append("src")
from wsgiref.simple_server import make_server
from htcondor_jobview.jobview_app import application
httpd = make_server('', 8000, application)
httpd.base_environ['jobview.config'] = 'tests/unl.conf'
httpd.base_environ['jobview.templates'] = 'templates'
print "Serving on port 8000..."
# Serve until process is killed
httpd.serve_forever()
| 19.086957
| 55
| 0.753986
|
64a5585a6c0343f78dadd1862483d6dfca8ca1be
| 451
|
py
|
Python
|
.history/src/Simulador_20200707131332.py
|
eduardodut/Trabalho_final_estatistica_cd
|
fbedbbea6bdd7a79e1d62030cde0fab4e93fc338
|
[
"MIT"
] | null | null | null |
.history/src/Simulador_20200707131332.py
|
eduardodut/Trabalho_final_estatistica_cd
|
fbedbbea6bdd7a79e1d62030cde0fab4e93fc338
|
[
"MIT"
] | null | null | null |
.history/src/Simulador_20200707131332.py
|
eduardodut/Trabalho_final_estatistica_cd
|
fbedbbea6bdd7a79e1d62030cde0fab4e93fc338
|
[
"MIT"
] | null | null | null |
import pandas as pd
import numpy as np
from Matriz_esferica import Matriz_esferica
from Individuo import Individuo
class Simulador():
def __init__(self, tamanho_matriz,):
self.num_iteracoes = 0
self.matriz_individuos = np.asmatrix(tamanho_matriz)
self.matriz_esferica = Matriz_esferica(tamanho_matriz)
self.dataframe = pd.DataFrame(columns= [''])
pass
sim = Simulador(2)
print(sim.matriz_individuos[0,0])
| 22.55
| 62
| 0.727273
|
9d1cf0bb83e8a9396d0943dd92b39f295c1de946
| 106
|
py
|
Python
|
cryptodock_framework/portfolio_manager/__init__.py
|
the-launch-tech/cryptodock-framework
|
de5a8ea8d4bcc427ff122cba8684abfd6a483152
|
[
"MIT"
] | null | null | null |
cryptodock_framework/portfolio_manager/__init__.py
|
the-launch-tech/cryptodock-framework
|
de5a8ea8d4bcc427ff122cba8684abfd6a483152
|
[
"MIT"
] | null | null | null |
cryptodock_framework/portfolio_manager/__init__.py
|
the-launch-tech/cryptodock-framework
|
de5a8ea8d4bcc427ff122cba8684abfd6a483152
|
[
"MIT"
] | null | null | null |
__all__ = [
'CryptoDockPortfolioManager'
]
from .portfolio_manager import CryptoDockPortfolioManager
| 17.666667
| 57
| 0.811321
|
1ef19966c005fb09e72b0d2b045c06221c23ba25
| 1,172
|
py
|
Python
|
catalog/export_xml.py
|
pedroalvesfilho/flask_vehicle
|
702854eb14ef0e4d4c1231687a08a3b123b5b7c8
|
[
"MIT"
] | null | null | null |
catalog/export_xml.py
|
pedroalvesfilho/flask_vehicle
|
702854eb14ef0e4d4c1231687a08a3b123b5b7c8
|
[
"MIT"
] | 1
|
2021-04-30T20:46:37.000Z
|
2021-04-30T20:46:37.000Z
|
catalog/export_xml.py
|
pedroalvesfilho/flask_vehicle
|
702854eb14ef0e4d4c1231687a08a3b123b5b7c8
|
[
"MIT"
] | null | null | null |
"""Provides an XML API endpoint."""
from xml.etree.ElementTree import Element, SubElement, tostring
from xml.dom.minidom import parseString
from catalog import app
from catalog.database_setup import Category, Item
from catalog.connect_to_database import connect_to_database
@app.route('/catalog.xml/')
def items_xml():
session = connect_to_database()
categories = session.query(Category).all()
root = Element('catalog')
for category in categories:
cat_tag = SubElement(root,
'category',
{'id':str(category.id), 'name':category.name})
items = session.query(Item).filter_by(category=category).all()
for item in items:
item_tag = SubElement(cat_tag, 'item', {'id':str(item.id)})
name_tag = SubElement(item_tag, 'name')
name_tag.text = item.name
desc_tag = SubElement(item_tag, 'description')
desc_tag.text = item.description
session.close()
# Return the XML with a 2 space indent to make it more human readable.
return parseString(tostring(root, 'utf-8')).toprettyxml(indent=' ')
| 34.470588
| 75
| 0.642491
|
3a0ba2b9aa4db59d3bbb2e1a7d979f1483fa2da9
| 640
|
py
|
Python
|
src/assemblyline/runtests.py
|
eventbrite/django-assemblyline
|
3f4e0524b54ea5d840f6989abc89613abcded575
|
[
"MIT"
] | 1
|
2016-05-23T15:11:58.000Z
|
2016-05-23T15:11:58.000Z
|
src/assemblyline/runtests.py
|
mscheibe/django-assemblyline
|
170db91f43ac915d4c671e2fc342a60df5cc3b35
|
[
"MIT"
] | null | null | null |
src/assemblyline/runtests.py
|
mscheibe/django-assemblyline
|
170db91f43ac915d4c671e2fc342a60df5cc3b35
|
[
"MIT"
] | 2
|
2016-08-14T07:15:43.000Z
|
2021-09-08T11:57:38.000Z
|
#This file mainly exists to allow python setup.py test to work.
import os
import sys
def runtests():
# set the environment up so Django can find some settings
os.environ['DJANGO_SETTINGS_MODULE'] = 'assemblyline.testsettings'
# ...and now that it can find settings, import them
from django.conf import settings
from django.test.utils import get_runner
test_runner = get_runner(settings)(verbosity=1, interactive=True)
failures = test_runner.run_tests(['assemblyline',])
# exit with the failure information to satisfy unittest/setuptools
sys.exit(failures)
if __name__ == '__main__':
runtests()
| 29.090909
| 70
| 0.734375
|
44739aef486e738d006d0bc08016481c7ca6d68d
| 5,766
|
py
|
Python
|
city_scrapers/spiders/chi_police.py
|
zarifmahmud/city-scrapers
|
52d6056001c8ea5e100dd686c52947836d63aff9
|
[
"MIT"
] | null | null | null |
city_scrapers/spiders/chi_police.py
|
zarifmahmud/city-scrapers
|
52d6056001c8ea5e100dd686c52947836d63aff9
|
[
"MIT"
] | null | null | null |
city_scrapers/spiders/chi_police.py
|
zarifmahmud/city-scrapers
|
52d6056001c8ea5e100dd686c52947836d63aff9
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
All spiders should yield data shaped according to the Open Civic Data
specification (http://docs.opencivicdata.org/en/latest/data/event.html).
"""
import json
import re
from datetime import datetime
from city_scrapers.constants import COMMITTEE, POLICE_BEAT
from city_scrapers.spider import Spider
class ChiPoliceSpider(Spider):
name = 'chi_police'
agency_name = 'Chicago Police Department'
timezone = 'America/Chicago'
allowed_domains = [
'https://home.chicagopolice.org/wp-content/themes/cpd-bootstrap/proxy/miniProxy.php?https://home.chicagopolice.org/get-involved-with-caps/all-community-event-calendars/district-1/' # noqa
]
start_urls = [
'https://home.chicagopolice.org/wp-content/themes/cpd-bootstrap/proxy/miniProxy.php?https://home.chicagopolice.org/get-involved-with-caps/all-community-event-calendars/district-1/' # noqa
]
custom_settings = {
'USER_AGENT':
'Mozilla/5.0 (Linux; <Android Version>; <Build Tag etc.>) AppleWebKit/<WebKit Rev> (KHTML, like Gecko) Chrome/<Chrome Rev> Mobile Safari/<WebKit Rev>' # noqa
}
def parse(self, response):
"""
`parse` should always `yield` a dict that follows the `Open Civic Data
event standard <http://docs.opencivicdata.org/en/latest/data/event.html>`_.
Change the `_parse_id`, `_parse_name`, etc methods to fit your scraping
needs.
"""
try:
data = json.loads(response.body_as_unicode())
except json.decoder.JSONDecodeError:
return
for item in data:
# Drop events that aren't Beat meetings or DAC meetings
classification = self._parse_classification(item)
if not classification:
continue
data = {
'_type': 'event',
'id': self._parse_id(item),
'name': self._parse_name(classification, item),
'event_description': '',
'classification': classification,
'all_day': False,
'start': self._parse_start(item),
'end': self._parse_end(item),
'location': self._parse_location(item),
'documents': [],
'sources': self._parse_sources(item)
}
data['id'] = self._generate_id(data)
data['status'] = self._parse_status(data, item)
yield data
def _parse_status(self, data, item):
text = item.get('eventDetails', '')
if text is None:
text = ''
return self._generate_status(data, text)
def _parse_id(self, item):
"""
Calulate ID. ID must be unique within the data source being scraped.
"""
return str(item['calendarId'])
def _parse_classification(self, item):
"""
Returns one of the following:
* District Advisory Committee (DAC)
* Beat Meeting
* ''
"""
if (('district advisory committee' in item['title'].lower()) or ('DAC' in item['title'])):
return COMMITTEE
elif 'beat' in item['title'].lower():
return POLICE_BEAT
else:
return ''
def _parse_name(self, classification, item):
"""
Generate a name based on the classfication.
"""
if classification == COMMITTEE:
return 'District Advisory Committee'
elif classification == POLICE_BEAT:
return 'CAPS District {}, Beat {}'.format(item['calendarId'],
self._parse_beat(item)).strip()
else:
return None
def _parse_beat(self, item):
district = str(item['calendarId'])
beat_split = re.sub(r'[\D]+', ' ', item['title']).split()
beat_list = []
for beat_num in beat_split:
if len(beat_num) > 2 and beat_num.startswith(district):
beat_list.append(beat_num[len(district):])
else:
beat_list.append(beat_num)
if len(beat_list) == 1:
return beat_list[0]
elif len(beat_list) > 1:
return '{} and {}'.format(', '.join(beat_list[:-1]), beat_list[-1])
return ''
def _parse_location(self, item):
"""
Parses location, adding Chicago, IL to the end of the address
since it isn't included but can be safely assumed.
"""
if item['location']:
address = item['location'] + ' Chicago, IL'
else:
address = None
return {'address': address, 'name': '', 'neighborhood': ''}
def _parse_all_day(self, item):
"""
Parse or generate all-day status. Defaults to false.
"""
return False
def _parse_start(self, item):
"""
Parse start date and time.
"""
datetime_obj = datetime.strptime(item['start'], "%Y-%m-%dT%H:%M:%S")
return {'date': datetime_obj.date(), 'time': datetime_obj.time(), 'note': ''}
def _parse_end(self, item):
"""
Parse end date and time.
"""
try:
datetime_obj = datetime.strptime(item['end'], "%Y-%m-%dT%H:%M:%S")
except TypeError:
return {'date': None, 'time': None, 'note': 'no end time listed'}
else:
return {'date': datetime_obj.date(), 'time': datetime_obj.time(), 'note': ''}
def _parse_sources(self, item):
"""
Parse sources.
"""
return [{
'url': (
'https://home.chicagopolice.org/get-involved-with-caps/'
'all-community-event-calendars'
),
'note': ''
}]
| 35.158537
| 196
| 0.560527
|
a0a75b391f0a5e49f7274473a3406b830285ab86
| 1,291
|
py
|
Python
|
app/core/tests/test_models.py
|
graovic/recipe-app-api
|
e883c64c144b67689238d2506552cc16703bb6a4
|
[
"Apache-2.0"
] | null | null | null |
app/core/tests/test_models.py
|
graovic/recipe-app-api
|
e883c64c144b67689238d2506552cc16703bb6a4
|
[
"Apache-2.0"
] | null | null | null |
app/core/tests/test_models.py
|
graovic/recipe-app-api
|
e883c64c144b67689238d2506552cc16703bb6a4
|
[
"Apache-2.0"
] | null | null | null |
from django.test import TestCase
from django.contrib.auth import get_user_model
class ModelTest(TestCase):
def test_create_user_with_email_successful(self):
"""Test creating a new user with an email is successful"""
email = 'goran.raovic@gmail.com'
password = 'testpass123'
user = get_user_model().objects.create_user(email=email,
password=password)
self.assertEqual(user.email, email)
self.assertTrue(user.check_password(password))
def test_new_user_email_normalized(self):
"""Test the email for a new user is normalized"""
email = "test@GOOGLE.com"
user = get_user_model().objects.create_user(email, 'test123')
self.assertEqual(user.email, email.lower())
def test_new_user_invalid_email(self):
"""Test creating user with no email raises error"""
with self.assertRaises(ValueError):
get_user_model().objects.create_user(None, 'test123')
def test_create_new_superuser(self):
"""Test creating a new superuser"""
user = get_user_model().objects.create_superuser(
'goran.raovic@gmail.com', 'test123')
self.assertTrue(user.is_superuser)
self.assertTrue(user.is_staff)
| 37.970588
| 70
| 0.658404
|
b255bb49ee4d57e9ed835917f0ec5670c9478348
| 5,400
|
py
|
Python
|
apps/export/views.py
|
rapidsms/rapidsms-legacy
|
43c2ecd41fd1541a2538326edee3d9e816d84529
|
[
"BSD-3-Clause"
] | null | null | null |
apps/export/views.py
|
rapidsms/rapidsms-legacy
|
43c2ecd41fd1541a2538326edee3d9e816d84529
|
[
"BSD-3-Clause"
] | null | null | null |
apps/export/views.py
|
rapidsms/rapidsms-legacy
|
43c2ecd41fd1541a2538326edee3d9e816d84529
|
[
"BSD-3-Clause"
] | 1
|
2019-11-02T19:35:54.000Z
|
2019-11-02T19:35:54.000Z
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import os, re
import datetime
from subprocess import *
from django import http
from django.db import models
from django.utils.text import capfirst
from django.core.exceptions import FieldError
from rapidsms.webui import settings
def database(req):
"""Returns a SQL dump of the current database, by reading the settings
from the config file, and calling the relevant dump program. Currently,
only mySQL and SQLite3 are supported."""
conf = vars(settings)
if settings.DATABASE_ENGINE == "mysql":
cmd = "mysqldump --host=%(DATABASE_HOST)s --user=%(DATABASE_USER)s --password=%(DATABASE_USER)s %(DATABASE_NAME)s" % (conf)
elif settings.DATABASE_ENGINE == "sqlite3":
cmd = "sqlite3 %(DATABASE_NAME)s .dump" % (conf)
else:
return HttpResponse(
"Sorry, %(DATABASE_ENGINE)s databases are not supported yet." % (conf),
status=500, content_type="text/plain")
# execute the dump command, and wait for it to terminate
proc = Popen([cmd], shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE)
sql = proc.communicate()
# download the file as plain text
today = datetime.datetime.now().strftime("%d-%m-%Y")
resp = http.HttpResponse(sql, mimetype="text/plain")
resp["content-disposition"] = "attachment; filename=%s.sql" % (today)
return resp
def _get_model(app_label, model_name):
model = models.get_model(app_label, model_name)
# check that the model is valid
if model is None:
raise http.Http404(
"App %r, model %r, not found."\
% (app_label, model_name))
return model
def str_to_excel(req):
def __table(str):
return "<table>\n%s</table>" % "".join(map(__row, str.split("\n")))
def __row(str):
return " <tr>\n%s </tr>\n" % "".join(map(__col, str.split("|")))
def __col(str):
str, cs = re.match("^(.*?)(?::(\d+)\s*)?$", str).groups()
return " <td colspan='%s'>%s</td>\n" % (cs, str)
# dump it as a simple html table
html = __table(req.POST["data"])
# download as an excel spreadsheet
resp = http.HttpResponse(html, mimetype='application/vnd.ms-excel')
resp["content-disposition"] = "attachment; filename=test.xls"
return resp
def model_to_excel(request, app_label, model_name, req_filters=None):
model = _get_model(app_label, model_name)
max_depth = 8
rows = []
# if no filters were explictly passed,
# then we will look for them in the GET
if req_filters is None:
req_filters = request.GET
# build a dict of filters, to control
# which objects we get. todo: is this
# dangerous? i can't see any way that
# it is, but it seems kind of wrong
filters = {}
for k ,v in req_filters.items():
filters[str(k)] = v
# fetch the data (might raise if any of the
# params couldn't be matched to model fields
try:
export_data = model.objects.filter(**filters)
except FieldError, e:
return http.HttpResponse(e.message,
status=500, mimetype="text/plain")
# sort the records if requested
if "sort" in req_filters:
export_data = export_data.order_by(str(req_filters["sort"]))
# this function builds a flat list of column titles (verbose names)
# recursively, to include as much data as possible in the export
def build_header(model, depth=0, prefix=""):
columns = []
for field in model._meta.fields:
caption = prefix + capfirst(field.verbose_name)
# if this field is a foreign key, then
# we will recurse to fetch it's fields
if (hasattr(field, "rel")) and (field.rel is not None) and (depth < max_depth):
columns.extend(build_header(field.rel.to, depth+1, caption + ": "))
# not a foreign key, so append
# this column in it's raw form
else:
columns.append("<th>%s</th>" % (caption))
return columns
# the first row contains no data, just field names
rows.append("<tr>%s</tr>" % ("".join(build_header(model))))
# this function is *way* too similar to the function
# above to warrant its independance. abstraction!
def build_row(model, instance=None, depth=0):
columns = []
for field in model._meta.fields:
# fetch the value of this cell
if instance is not None:
cell = getattr(instance, field.name)
# the cell is NONE, but we'll still need to
# recurse if it's a foreign key, so the row
# doesn't end up shorter the rest
else: cell = None
# if this field is a foreign key, then
# we will recurse to fetch it's fields
if (hasattr(field, "rel")) and (field.rel is not None) and (depth < max_depth):
columns.extend(build_row(field.rel.to, cell, depth+1))
# if this cell is none, insert a blank column,
# so we don't have "None" all over the place
elif (cell is None):
columns.append("<td></td>")
# not a foreign key, so append
# this column in it's raw form
else: columns.append("<td>%s</td>" % (cell))
return columns
# the matrix of dumped data
for object in export_data:
row = "".join(build_row(model, object))
rows.append("<tr>%s</tr>" % (row))
# dump it as a simple html table
html = "<table>%s</table>" % ("\n".join(rows))
# download as an excel spreadsheet
resp = http.HttpResponse(html, mimetype='application/vnd.ms-excel')
resp["content-disposition"] = "attachment; filename=%s.xls" % model_name
return resp
| 30.167598
| 131
| 0.660926
|
31101ab899de0b52bd3f69c710a04286dd78adea
| 9,331
|
py
|
Python
|
pythreejs/pythreejs.py
|
elPistolero/pythreejs
|
f3c5000a6fcf06af775664d55ad6ef16322e8eca
|
[
"BSD-3-Clause"
] | null | null | null |
pythreejs/pythreejs.py
|
elPistolero/pythreejs
|
f3c5000a6fcf06af775664d55ad6ef16322e8eca
|
[
"BSD-3-Clause"
] | null | null | null |
pythreejs/pythreejs.py
|
elPistolero/pythreejs
|
f3c5000a6fcf06af775664d55ad6ef16322e8eca
|
[
"BSD-3-Clause"
] | 1
|
2019-11-04T09:00:17.000Z
|
2019-11-04T09:00:17.000Z
|
r"""
Python widgets for three.js plotting
In this wrapping of three.js, we try to stay close to the three.js API. Often,
the three.js documentation at http://threejs.org/docs/ helps in understanding
these classes and the various constants.
This is meant to be a low-level wrapper around three.js. We hope that others
will use this foundation to build higher-level interfaces to build 3d plots.
"""
from __future__ import absolute_import
from ipywidgets import Widget, widget_serialization, Color
from traitlets import Unicode, CInt, Instance, List, CFloat, Bool, observe, validate
import numpy as np
from ._package import npm_pkg_name
from ._version import EXTENSION_SPEC_VERSION
from .core.BufferAttribute import BufferAttribute
from .core.Geometry import Geometry
from .core.BufferGeometry import BufferGeometry
from .geometries.BoxGeometry_autogen import BoxGeometry
from .geometries.SphereGeometry_autogen import SphereGeometry
from .lights.AmbientLight_autogen import AmbientLight
from .lights.DirectionalLight_autogen import DirectionalLight
from .materials.Material_autogen import Material
from .materials.MeshLambertMaterial_autogen import MeshLambertMaterial
from .materials.SpriteMaterial_autogen import SpriteMaterial
from .objects.Group_autogen import Group
from .objects.Line_autogen import Line
from .objects.Mesh_autogen import Mesh
from .objects.Sprite_autogen import Sprite
from .textures.Texture_autogen import Texture
from .textures.DataTexture import DataTexture
from .textures.TextTexture_autogen import TextTexture
def grid_indices_gen(nx, ny):
"""A generator for grid vertex indices.
"""
for x in range(nx - 1):
for y in range(ny - 1):
root = x + y * ny
yield (root, root + 1, root + nx)
yield (root + nx, root + 1, root + nx + 1)
class SurfaceGeometry(BufferGeometry):
"""
A regular grid with heights
"""
z = List(CFloat, [0] * 100)
width = CInt(10)
height = CInt(10)
width_segments = CInt(10, read_only=True)
height_segments = CInt(10, read_only=True)
def __init__(self, **kwargs):
for key in ['width_segments', 'height_segments']:
if key in kwargs:
self.set_trait(key, kwargs.pop(key))
super(SurfaceGeometry, self).__init__(**kwargs)
self._update_surface()
@observe('z', 'width', 'height')
def _on_change(self, change):
# Only trigger automatically after initial creation
if 'position' in self.attributes:
self._update_surface()
def _update_surface(self):
nx = self.width_segments + 1
ny = self.height_segments + 1
x = np.linspace(-self.width/2, self.width/2, nx)
y = np.linspace(-self.height/2, self.height/2, ny)
xx, yy = np.meshgrid(x, y)
z = np.array(self.z).reshape((nx, ny))
positions = np.dstack((xx, yy, z)).reshape(nx * ny, 3).astype(np.float32)
dx, dy = np.gradient(z, self.width/nx, self.height/ny)
normals = np.dstack((-dx, -dy, np.ones_like(dx))).reshape(nx * ny, 3).astype(np.float32)
vmin = np.min(positions, 0)[:2]
vrange = np.max(positions, 0)[:2] - vmin
uvs = ((positions[:, :2] - vmin) / vrange)
indices = np.array(tuple(grid_indices_gen(nx, ny)), dtype=np.uint16).ravel()
if 'position' not in self.attributes:
# Initial values:
self.attributes = {
'position': BufferAttribute(positions),
'index': BufferAttribute(indices),
'normal': BufferAttribute(normals),
'uv': BufferAttribute(uvs),
}
else:
# We're updating
with self.hold_trait_notifications():
self.attributes['position'].array = positions
self.attributes['index'].array = indices
self.attributes['normal'].array = normals
self.attributes['uv'].array = uvs
def SurfaceGrid(geometry, material, **kwargs):
"""A grid covering a surface.
This will draw a line mesh overlaying the SurfaceGeometry.
"""
nx = geometry.width_segments + 1
ny = geometry.height_segments + 1
vertices = geometry.attributes['position'].array
lines = []
for x in range(nx):
g = Geometry(vertices=[vertices[y * nx + x, :].tolist() for y in range(ny)])
lines.append(Line(g, material))
for y in range(ny):
g = Geometry(vertices=[vertices[y * nx + x, :].tolist() for x in range(nx)])
lines.append(Line(g, material))
def _update_lines(change):
vertices = geometry.attributes['position'].array
for x in range(nx):
g = lines[x].geometry
g.vertices = [vertices[y * nx + x, :].tolist() for y in range(ny)]
for y in range(ny):
g = lines[nx + y].geometry
g.vertices = [vertices[y * nx + x, :].tolist() for x in range(nx)]
geometry.attributes['position'].observe(_update_lines, names=('array'))
return Group(children=lines, **kwargs)
class PlotMesh(Mesh):
plot = Instance('sage.plot.plot3d.base.Graphics3d')
def _plot_changed(self, name, old, new):
self.type = new.scenetree_json()['type']
if self.type == 'object':
self.type = new.scenetree_json()['geometry']['type']
self.material = self.material_from_object(new)
else:
self.type = new.scenetree_json()['children'][0]['geometry']['type']
self.material = self.material_from_other(new)
if self.type == 'index_face_set':
self.geometry = self.geometry_from_plot(new)
elif self.type == 'sphere':
self.geometry = self.geometry_from_sphere(new)
elif self.type == 'box':
self.geometry = self.geometry_from_box(new)
def material_from_object(self, p):
# TODO: do this without scenetree_json()
t = p.texture.scenetree_json()
m = MeshLambertMaterial(side='DoubleSide')
m.color = t['color']
m.opacity = t['opacity']
# TODO: support other attributes
return m
def material_from_other(self, p):
# TODO: do this without scenetree_json()
t = p.scenetree_json()['children'][0]['texture']
m = MeshLambertMaterial(side='DoubleSide')
m.color = t['color']
m.opacity = t['opacity']
# TODO: support other attributes
return m
def geometry_from_box(self, p):
g = BoxGeometry()
g.width = p.scenetree_json()['geometry']['size'][0]
g.height = p.scenetree_json()['geometry']['size'][1]
g.depth = p.scenetree_json()['geometry']['size'][2]
return g
def geometry_from_sphere(self, p):
g = SphereGeometry()
g.radius = p.scenetree_json()['children'][0]['geometry']['radius']
return g
def geometry_from_plot(self, p):
from itertools import groupby, chain
def flatten(ll):
return list(chain.from_iterable(ll))
p.triangulate()
g = FaceGeometry()
g.vertices = flatten(p.vertices())
f = p.index_faces()
f.sort(key=len)
faces = {k: flatten(v) for k, v in groupby(f, len)}
g.face3 = faces.get(3, [])
g.face4 = faces.get(4, [])
return g
# Some helper classes and functions
def lights_color():
return [
AmbientLight(color=(0.312, 0.188, 0.4)),
DirectionalLight(position=[1, 0, 1], color=[.8, 0, 0]),
DirectionalLight(position=[1, 1, 1], color=[0, .8, 0]),
DirectionalLight(position=[0, 1, 1], color=[0, 0, .8]),
DirectionalLight(position=[-1, -1, -1], color=[.9, .7, .9]),
]
def lights_gray():
return [
AmbientLight(color=[.6, .6, .6]),
DirectionalLight(position=[0, 1, 1], color=[.5, .5, .5]),
DirectionalLight(position=[0, 0, 1], color=[.5, .5, .5]),
DirectionalLight(position=[1, 1, 1], color=[.5, .5, .5]),
DirectionalLight(position=[-1, -1, -1], color=[.7, .7, .7]),
]
def make_text(text, position=(0, 0, 0), height=1):
"""
Return a text object at the specified location with a given height
"""
sm = SpriteMaterial(map=TextTexture(string=text, color='white', size=100, squareTexture=False))
return Sprite(material=sm, position=position, scaleToTexture=True, scale=[1, height, 1])
def height_texture(z, colormap='viridis'):
"""Create a texture corresponding to the heights in z and the given colormap."""
from matplotlib import cm
from skimage import img_as_ubyte
colormap = cm.get_cmap(colormap)
im = z.copy()
# rescale to be in [0,1], scale nan to be the smallest value
im -= np.nanmin(im)
im /= np.nanmax(im)
im = np.nan_to_num(im)
import warnings
with warnings.catch_warnings():
# ignore the precision warning that comes from converting floats to uint8 types
warnings.filterwarnings('ignore',
message='Possible precision loss when converting from',
category=UserWarning,
module='skimage.util.dtype')
rgba_im = img_as_ubyte(colormap(im)) # convert the values to rgba image using the colormap
return DataTexture(data=rgba_im, format='RGBAFormat')
| 36.449219
| 99
| 0.627907
|
c4ace9bf77fb4975bd3b2e93b2b9e5271d0c6f8b
| 13,949
|
py
|
Python
|
sdk/python/pulumi_azure_native/network/v20210201/ddos_protection_plan.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/network/v20210201/ddos_protection_plan.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/network/v20210201/ddos_protection_plan.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = ['DdosProtectionPlanArgs', 'DdosProtectionPlan']
@pulumi.input_type
class DdosProtectionPlanArgs:
def __init__(__self__, *,
resource_group_name: pulumi.Input[str],
ddos_protection_plan_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a DdosProtectionPlan resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[str] ddos_protection_plan_name: The name of the DDoS protection plan.
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
"""
pulumi.set(__self__, "resource_group_name", resource_group_name)
if ddos_protection_plan_name is not None:
pulumi.set(__self__, "ddos_protection_plan_name", ddos_protection_plan_name)
if location is not None:
pulumi.set(__self__, "location", location)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="ddosProtectionPlanName")
def ddos_protection_plan_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the DDoS protection plan.
"""
return pulumi.get(self, "ddos_protection_plan_name")
@ddos_protection_plan_name.setter
def ddos_protection_plan_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ddos_protection_plan_name", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
class DdosProtectionPlan(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
ddos_protection_plan_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
"""
A DDoS protection plan in a resource group.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] ddos_protection_plan_name: The name of the DDoS protection plan.
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: DdosProtectionPlanArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
A DDoS protection plan in a resource group.
:param str resource_name: The name of the resource.
:param DdosProtectionPlanArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(DdosProtectionPlanArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
ddos_protection_plan_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = DdosProtectionPlanArgs.__new__(DdosProtectionPlanArgs)
__props__.__dict__["ddos_protection_plan_name"] = ddos_protection_plan_name
__props__.__dict__["location"] = location
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["tags"] = tags
__props__.__dict__["etag"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["resource_guid"] = None
__props__.__dict__["type"] = None
__props__.__dict__["virtual_networks"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:network/v20210201:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20180201:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20180201:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20180401:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20180401:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20180601:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20180601:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20180701:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20180701:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20180801:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20180801:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20181001:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20181001:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20181101:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20181101:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20181201:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20181201:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20190201:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20190201:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20190401:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20190401:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20190601:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20190601:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20190701:DdosProtectionPlan"), 
pulumi.Alias(type_="azure-nextgen:network/v20190701:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20190801:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20190801:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20190901:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20190901:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20191101:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20191101:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20191201:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20191201:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20200301:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20200301:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20200401:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20200401:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20200501:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20200501:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20200601:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20200601:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20200701:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20200701:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20200801:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20200801:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20201101:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20201101:DdosProtectionPlan"), pulumi.Alias(type_="azure-native:network/v20210301:DdosProtectionPlan"), pulumi.Alias(type_="azure-nextgen:network/v20210301:DdosProtectionPlan")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(DdosProtectionPlan, __self__).__init__(
'azure-native:network/v20210201:DdosProtectionPlan',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'DdosProtectionPlan':
"""
Get an existing DdosProtectionPlan resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = DdosProtectionPlanArgs.__new__(DdosProtectionPlanArgs)
__props__.__dict__["etag"] = None
__props__.__dict__["location"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["resource_guid"] = None
__props__.__dict__["tags"] = None
__props__.__dict__["type"] = None
__props__.__dict__["virtual_networks"] = None
return DdosProtectionPlan(resource_name, opts=opts, __props__=__props__)
    # Read-only output properties, populated by the provider. The
    # @pulumi.getter name= argument maps the Azure camelCase wire name to
    # the snake_case Python attribute.

    @property
    @pulumi.getter
    def etag(self) -> pulumi.Output[str]:
        """
        A unique read-only string that changes whenever the resource is updated.
        """
        return pulumi.get(self, "etag")

    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[Optional[str]]:
        """
        Resource location.
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Resource name.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> pulumi.Output[str]:
        """
        The provisioning state of the DDoS protection plan resource.
        """
        return pulumi.get(self, "provisioning_state")

    @property
    @pulumi.getter(name="resourceGuid")
    def resource_guid(self) -> pulumi.Output[str]:
        """
        The resource GUID property of the DDoS protection plan resource. It uniquely identifies the resource, even if the user changes its name or migrate the resource across subscriptions or resource groups.
        """
        return pulumi.get(self, "resource_guid")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        Resource tags.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        Resource type.
        """
        return pulumi.get(self, "type")

    @property
    @pulumi.getter(name="virtualNetworks")
    def virtual_networks(self) -> pulumi.Output[Sequence['outputs.SubResourceResponse']]:
        """
        The list of virtual networks associated with the DDoS protection plan resource. This list is read-only.
        """
        return pulumi.get(self, "virtual_networks")
| 54.701961
| 3,782
| 0.693885
|
36c94e8d7bc09fa4103b898b60ea271734440068
| 6,106
|
py
|
Python
|
telemetry/telemetry/internal/platform/fuchsia_device.py
|
Martijnve23/catapult
|
5c63b19d221af6a12889e8727acc85d93892cab7
|
[
"BSD-3-Clause"
] | 1
|
2021-07-04T03:26:43.000Z
|
2021-07-04T03:26:43.000Z
|
telemetry/telemetry/internal/platform/fuchsia_device.py
|
Martijnve23/catapult
|
5c63b19d221af6a12889e8727acc85d93892cab7
|
[
"BSD-3-Clause"
] | null | null | null |
telemetry/telemetry/internal/platform/fuchsia_device.py
|
Martijnve23/catapult
|
5c63b19d221af6a12889e8727acc85d93892cab7
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A Fuchsia device instance"""
from __future__ import absolute_import
import logging
import os
import platform
import subprocess
import tarfile
from telemetry.core import fuchsia_interface
from telemetry.core import util
from telemetry.internal.platform import device
from telemetry.util import cmd_util
_LIST_DEVICES_TIMEOUT_SECS = 5
_SDK_SHA1 = '8894838554076535504'
_SDK_ROOT_IN_CATAPULT = os.path.join(util.GetCatapultDir(), 'third_party',
'fuchsia-sdk', 'sdk')
_SDK_ROOT_IN_CHROMIUM = os.path.join(util.GetCatapultDir(), '..',
'fuchsia-sdk', 'sdk')
_SDK_TOOLS = [
os.path.join('tools', 'device-finder'),
os.path.join('tools', 'symbolize')
]
class FuchsiaDevice(device.Device):
    """A Fuchsia device addressed by host name/IP, SSH config and port."""

    def __init__(self, target_name, host, ssh_config,
                 system_log_file, port, managed_repo):
        display_name = 'Fuchsia with host: %s' % host
        device_guid = 'fuchsia:%s' % target_name
        super(FuchsiaDevice, self).__init__(name=display_name, guid=device_guid)
        # Connection parameters, exposed read-only via the properties below.
        self._host = host
        self._port = port
        self._target_name = target_name
        self._ssh_config = ssh_config
        self._system_log_file = system_log_file
        self._managed_repo = managed_repo

    @classmethod
    def GetAllConnectedDevices(cls, denylist):
        # Discovery is handled by FindAllAvailableDevices() in this module.
        return []

    @property
    def managed_repo(self):
        return self._managed_repo

    @property
    def target_name(self):
        return self._target_name

    @property
    def host(self):
        return self._host

    @property
    def ssh_config(self):
        return self._ssh_config

    @property
    def system_log_file(self):
        return self._system_log_file

    @property
    def port(self):
        return self._port
def _DownloadFuchsiaSDK(tar_file, dest=_SDK_ROOT_IN_CATAPULT):
    """Fetch the pinned Fuchsia SDK archive and unpack the needed tools.

    Downloads the SDK tarball (version _SDK_SHA1) via gsutil into tar_file,
    extracts only the tools listed in _SDK_TOOLS into dest, then removes
    the tarball.
    """
    if not os.path.isdir(dest):
        os.makedirs(dest)
    gsutil = os.path.join(util.GetCatapultDir(), 'third_party', 'gsutil',
                          'gsutil')
    package_url = 'gs://fuchsia/sdk/core/linux-amd64/' + _SDK_SHA1
    # check_output raises CalledProcessError (with merged output) on failure.
    subprocess.check_output([gsutil, 'cp', package_url, tar_file],
                            stderr=subprocess.STDOUT)
    with tarfile.open(tar_file, 'r') as archive:
        for tool in _SDK_TOOLS:
            # tarfile only accepts POSIX paths.
            archive.extract(tool.replace(os.path.sep, '/'), dest)
    os.remove(tar_file)
def _FindFuchsiaDevice(sdk_root, is_emulator):
    """List reachable Fuchsia devices with the SDK's device-finder tool.

    Returns the raw listing (one 'host target-name' pair per line), or a
    falsy value when no device answered. Emulators are not supported.
    """
    finder = os.path.join(sdk_root, 'tools', 'device-finder')
    if is_emulator:
        logging.warning('Fuchsia emulators not supported at this time.')
        return None
    timeout_arg = str(_LIST_DEVICES_TIMEOUT_SECS) + 's'
    listing, _ = cmd_util.GetAllCmdOutput(
        [finder, 'list', '-full', '-netboot', '-timeout', timeout_arg])
    if not listing:
        logging.warning('No Fuchsia device found. Ensure your device is set up '
                        'and can be connected to.')
    return listing
def _DownloadFuchsiaSDKIfNecessary():
    """Downloads the Fuchsia SDK if not found in Chromium and Catapult repo.

    Returns:
      The path to the Fuchsia SDK directory
    """
    # An SDK checked out alongside a Chromium workspace takes precedence.
    if os.path.exists(_SDK_ROOT_IN_CHROMIUM):
        return _SDK_ROOT_IN_CHROMIUM
    if not os.path.exists(_SDK_ROOT_IN_CATAPULT):
        archive_path = os.path.join(_SDK_ROOT_IN_CATAPULT,
                                    'fuchsia-sdk-%s.tar' % _SDK_SHA1)
        _DownloadFuchsiaSDK(archive_path)
    return _SDK_ROOT_IN_CATAPULT
def FindAllAvailableDevices(options):
    """Returns a list of available device types.

    At most one FuchsiaDevice is returned, resolved in priority order:
    an SSH port already forwarded on the host, an explicit device address,
    and finally discovery through the SDK's device-finder tool.
    """
    # Will not find Fuchsia devices if Fuchsia browser is not specified.
    # This means that unless specifying browser=web-engine-shell, the user
    # will not see web-engine-shell as an available browser.
    if options.browser_type not in fuchsia_interface.FUCHSIA_BROWSERS:
        return []
    if platform.system() != 'Linux' or platform.machine() not in (
            'x86_64', 'aarch64'):
        # Fixed typo: was 'arm64hosts.'
        logging.warning(
            'Fuchsia in Telemetry only supports Linux x64 or arm64 hosts.')
        return []
    # If the ssh port of the device has been forwarded to a port on the host,
    # return that device directly.
    if options.fuchsia_ssh_port:
        return [FuchsiaDevice(target_name='local_device',
                              host='localhost',
                              system_log_file=options.fuchsia_system_log_file,
                              ssh_config=options.fuchsia_ssh_config,
                              port=options.fuchsia_ssh_port,
                              managed_repo=options.fuchsia_repo)]
    # If the IP address of the device is specified, use that directly.
    if options.fuchsia_device_address:
        return [FuchsiaDevice(target_name='device_target',
                              host=options.fuchsia_device_address,
                              system_log_file=options.fuchsia_system_log_file,
                              ssh_config=options.fuchsia_ssh_config,
                              port=options.fuchsia_ssh_port,
                              managed_repo=options.fuchsia_repo)]
    # Download the Fuchsia SDK if it doesn't exist.
    # TODO(https://crbug.com/1031763): Figure out how to use the dependency
    # manager.
    sdk_root = _DownloadFuchsiaSDKIfNecessary()
    try:
        device_list = _FindFuchsiaDevice(sdk_root, False)
    except OSError:
        logging.error('Fuchsia SDK Download failed. Please remove '
                      '%s and try again.', sdk_root)
        raise
    if not device_list:
        return []
    # Expected output will look something like
    # 'host0 target0\nhost1 target1\nhost2 target2'.
    first_device = device_list.splitlines()[0]
    host, target_name = first_device.split(' ')
    # Lazy %-args: the message is only formatted when INFO is enabled.
    logging.info('Using Fuchsia device with address %s and name %s',
                 host, target_name)
    return [FuchsiaDevice(target_name=target_name,
                          host=host,
                          system_log_file=options.fuchsia_system_log_file,
                          ssh_config=options.fuchsia_ssh_config,
                          port=options.fuchsia_ssh_port,
                          managed_repo=options.fuchsia_repo)]
| 34.303371
| 76
| 0.675729
|
00dee54650063b5522a3100f624d9a125ce7a01b
| 388
|
py
|
Python
|
app/wsgi.py
|
StevenMedina/MovieAPI
|
805e79d396e197383bce6095febf0252231a1018
|
[
"MIT"
] | null | null | null |
app/wsgi.py
|
StevenMedina/MovieAPI
|
805e79d396e197383bce6095febf0252231a1018
|
[
"MIT"
] | null | null | null |
app/wsgi.py
|
StevenMedina/MovieAPI
|
805e79d396e197383bce6095febf0252231a1018
|
[
"MIT"
] | null | null | null |
"""
WSGI config for omnibank project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'app.settings')
application = get_wsgi_application()
| 22.823529
| 78
| 0.783505
|
309865549b0680df44e3d283fcaa28042caa02ec
| 1,926
|
py
|
Python
|
src/Pybind11Wraps/Boundary/AxisBoundaryRZ.py
|
jmikeowen/Spheral
|
3e1082a7aefd6b328bd3ae24ca1a477108cfc3c4
|
[
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | 22
|
2018-07-31T21:38:22.000Z
|
2020-06-29T08:58:33.000Z
|
src/Pybind11Wraps/Boundary/AxisBoundaryRZ.py
|
jmikeowen/Spheral
|
3e1082a7aefd6b328bd3ae24ca1a477108cfc3c4
|
[
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | 41
|
2020-09-28T23:14:27.000Z
|
2022-03-28T17:01:33.000Z
|
src/Pybind11Wraps/Boundary/AxisBoundaryRZ.py
|
jmikeowen/Spheral
|
3e1082a7aefd6b328bd3ae24ca1a477108cfc3c4
|
[
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | 7
|
2019-12-01T07:00:06.000Z
|
2020-09-15T21:12:39.000Z
|
#-------------------------------------------------------------------------------
# AxisBoundaryRZ
#-------------------------------------------------------------------------------
from PYB11Generator import *
from Boundary import *
from BoundaryAbstractMethods import *
@PYB11template()
@PYB11template_dict({"Dimension" : "Dim<2>"})
class AxisBoundaryRZ(Boundary):
    # NOTE: the docstrings and string-valued returns in this class are
    # consumed by PYB11Generator to emit C++ binding code — the strings are
    # C++ types/names, not Python values. Do not edit them casually.

    PYB11typedefs = """
    typedef %(Dimension)s::Scalar Scalar;
    typedef %(Dimension)s::Vector Vector;
    typedef %(Dimension)s::Tensor Tensor;
    typedef %(Dimension)s::SymTensor SymTensor;
    typedef %(Dimension)s::ThirdRankTensor ThirdRankTensor;
    typedef %(Dimension)s::FourthRankTensor FourthRankTensor;
    typedef %(Dimension)s::FifthRankTensor FifthRankTensor;
    typedef %(Dimension)s::FacetedVolume FacetedVolume;
"""

    #...........................................................................
    # Constructors
    def pyinit(self,
               etamin = "double"):
        "Construct with the DataBase"

    #...........................................................................
    # Methods
    @PYB11virtual
    def setViolationNodes(self, nodeList="NodeList<%(Dimension)s>&"):
        return "void"

    @PYB11virtual
    def updateViolationNodes(self, nodeList="NodeList<%(Dimension)s>&"):
        return "void"

    @PYB11virtual
    @PYB11const
    def label(self):
        "The label for writing in restart files"
        return "std::string"

    #...........................................................................
    # Properties
    etamin = PYB11property("double", "etamin", "etamin", doc="The fuzz value for approaching the axis")
#-------------------------------------------------------------------------------
# Inject methods
#-------------------------------------------------------------------------------
#PYB11inject(BoundaryAbstractMethods, AxisBoundaryRZ, virtual=True, pure_virtual=False)
| 36.339623
| 103
| 0.492731
|
d94e087e5a91ffc3b890eaf740fc60b69c448b43
| 560
|
py
|
Python
|
jacquard/storage/tests/test_file.py
|
peteowlett/jacquard
|
772fd633e521501688e0933482cba45f48c23ef9
|
[
"MIT"
] | null | null | null |
jacquard/storage/tests/test_file.py
|
peteowlett/jacquard
|
772fd633e521501688e0933482cba45f48c23ef9
|
[
"MIT"
] | null | null | null |
jacquard/storage/tests/test_file.py
|
peteowlett/jacquard
|
772fd633e521501688e0933482cba45f48c23ef9
|
[
"MIT"
] | null | null | null |
import unittest
from jacquard.storage.file import FileStore
from jacquard.storage.testing_utils import StorageGauntlet
class FileGauntletTest(StorageGauntlet, unittest.TestCase):
    """Runs the shared storage-backend test gauntlet against FileStore."""

    def open_storage(self):
        # ':memory:' keeps the gauntlet hermetic — nothing touches disk.
        return FileStore(':memory:')
def test_exceptions_back_out_writes():
    """An exception inside a transaction must roll back its writes."""
    backend = FileStore(':memory:')
    try:
        with backend.transaction() as txn:
            txn['foo'] = "Blah"
            raise RuntimeError()
    except RuntimeError:
        pass
    # The aborted write must not be visible afterwards.
    with backend.transaction() as txn:
        assert 'foo' not in txn
| 23.333333
| 59
| 0.685714
|
ea73ab3a3be66386d3b4d9467185caada18f7b22
| 497
|
py
|
Python
|
market/test/test_zones.py
|
dpsommer/market
|
36df4527fcf4f0ef99207c8b8e63172429cfd226
|
[
"MIT"
] | null | null | null |
market/test/test_zones.py
|
dpsommer/market
|
36df4527fcf4f0ef99207c8b8e63172429cfd226
|
[
"MIT"
] | null | null | null |
market/test/test_zones.py
|
dpsommer/market
|
36df4527fcf4f0ef99207c8b8e63172429cfd226
|
[
"MIT"
] | null | null | null |
import unittest
from market.data.zones import Zone, world_map
from market.test import MockDataTestCase
class TestZones(MockDataTestCase):
    """Tests for Zone objects and the module-level world_map."""

    def test_seeded_map_generation(self):
        # TODO: not implemented yet.
        pass

    def test_add_zone_to_map(self):
        new_zone = Zone('Town')
        world_map.add_zone(new_zone)
        self.assertIn(new_zone, world_map._zones)

    def test_dynamic_map_expansion(self):
        # TODO: not implemented yet.
        pass

    def test_zone_persistence(self):
        # TODO: not implemented yet.
        pass
if __name__ == "__main__":
unittest.main()
| 19.115385
| 45
| 0.694165
|
49bd760f4e20e2a12e5686b3193bdba2895612e4
| 4,617
|
py
|
Python
|
paddle/trainer/tests/testPyDataWrapper.py
|
TarzanQll/Paddle-master
|
1f192b22c641f91bd98a5babe7189ac5d7d3b408
|
[
"Apache-2.0"
] | 1
|
2016-10-23T09:31:38.000Z
|
2016-10-23T09:31:38.000Z
|
paddle/trainer/tests/testPyDataWrapper.py
|
TarzanQll/Paddle-master
|
1f192b22c641f91bd98a5babe7189ac5d7d3b408
|
[
"Apache-2.0"
] | 3
|
2016-10-22T16:06:11.000Z
|
2016-11-07T06:30:37.000Z
|
paddle/trainer/tests/testPyDataWrapper.py
|
TarzanQll/Paddle-master
|
1f192b22c641f91bd98a5babe7189ac5d7d3b408
|
[
"Apache-2.0"
] | 1
|
2019-10-26T12:51:13.000Z
|
2019-10-26T12:51:13.000Z
|
# Copyright (c) 2016 Baidu, Inc. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
sys.path.append("../")
from paddle.trainer.PyDataProviderWrapper import *
import random
import json
import string
@provider(slots=[SparseNonValueSlot(10), DenseSlot(2), SparseValueSlot(10), StringSlot(1), IndexSlot(3)])
def processNonSequenceData(obj, filename):
    """Yield one sample per line of *filename*.

    Each line is semicolon-separated:
    label ; sparse-ids ; dense-values ; id:value pairs ; string.
    Python 2: map() returns lists here, which are yielded directly.
    """
    with open(filename, "rb") as f:
        for line in f:
            slots_str = line.split(';')
            index = int(slots_str[0])
            # [1:] drops the leading token of each slot field —
            # presumably a count/marker; confirm against the data format.
            non_values = map(int, slots_str[1].split()[1:])
            dense = map(float, slots_str[2].split()[1:])
            strs = slots_str[4].strip().split(' ', 1)[1]

            def __values_mapper__(s):
                # "id:value" -> (int id, float value)
                s = s.split(":")
                return int(s[0]), float(s[1])
            values = map(__values_mapper__, slots_str[3].split()[1:])
            # Order must match the slots declaration in the decorator.
            yield [non_values, dense, values, strs, index]
# Bounds for the randomly generated test fixtures below.
SPARSE_ID_LIMIT = 1000
SPARSE_ID_COUNT = 100
SEQUENCE_LIMIT = 50
STRING_LIMIT = 10

# Uniform random generators used to build slot contents.
sparse_id_randomer = lambda: random.randrange(0, SPARSE_ID_LIMIT - 1)
sparse_count_randomer = lambda: random.randrange(1, SPARSE_ID_COUNT)
val_randomer = lambda: random.uniform(-1.0, 1.0)
seq_count_randomer = lambda: random.randrange(1, SEQUENCE_LIMIT)
str_count_randomer = lambda: random.randrange(1, STRING_LIMIT)


class IDRandomer():  # A random generator, return unique id
    """Draws sparse ids without repetition across calls on one instance.

    Callers must request fewer ids than the id space holds, otherwise the
    draw loop cannot terminate.
    """
    def __init__(self):
        self.id_set = set()

    def __call__(self):
        # Re-draw until an unseen id appears. An iterative loop replaces the
        # original self-recursion so long draw sequences cannot overflow the
        # call stack; the accept/reject distribution is unchanged.
        idx = sparse_id_randomer()
        while idx in self.id_set:
            idx = sparse_id_randomer()
        self.id_set.add(idx)
        return idx
# Module-level random fixtures, one list per slot type.
# Python 2: map() returns a list and xrange is a lazy range.

# SparseValueSlot
def sparse_value_creator(_):
    # One sequence element: a random-length list of unique (id, value) pairs.
    rand = IDRandomer()
    return [(rand(), val_randomer()) for _ in xrange(sparse_count_randomer())]
sparse_value = map(sparse_value_creator, range(seq_count_randomer()))

# DenseSlot
def dense_creator(_):
    return [val_randomer() for _ in xrange(SPARSE_ID_LIMIT)]
dense = map(dense_creator, range(seq_count_randomer()))

# SparseNonValueSlot
def sparse_creator(_):
    rand = IDRandomer()
    return [rand() for _ in xrange(sparse_count_randomer())]
sparse_nonvalue = map(sparse_creator, range(seq_count_randomer()))

# IndexSlot
ids = [sparse_id_randomer() for _ in range(seq_count_randomer())]

# StringSlot
def random_str(size = 8, chars=string.ascii_letters + string.digits):
    # Random alphanumeric string of the given length.
    return ''.join(random.choice(chars) for _ in range(size))
strs = [random_str(str_count_randomer()) for _ in range(seq_count_randomer())]
def processSeqAndGenerateDataInit(obj, *args, **kwargs):
    # init_hook: "load_data_args" optionally overrides where the generated
    # data is dumped; defaults to test_data.json.
    obj.json_filename = kwargs.get("load_data_args", "test_data.json")
@provider(slots=[SparseValueSlot(SPARSE_ID_LIMIT), DenseSlot(SPARSE_ID_LIMIT),
                 SparseNonValueSlot(SPARSE_ID_LIMIT), IndexSlot(SPARSE_ID_LIMIT),
                 StringSlot(SPARSE_ID_LIMIT)],
          use_seq=True, init_hook=processSeqAndGenerateDataInit)
def processSeqAndGenerateData(obj, name):
    """Yield the module-level random sequence sample once, and dump it to
    JSON so the test can compare against what the wrapper produced."""
    retv = [sparse_value, dense, sparse_nonvalue, ids, strs]
    # Write to protoseq.
    with open(obj.json_filename, "w") as f:
        json.dump(retv, f)
    yield retv
def processSubSeqAndGenerateDataInit(obj, *args, **kwargs):
    # init_hook: same output-path override as the sequence variant above.
    obj.json_filename = kwargs.get("load_data_args", "test_data.json")
@provider(slots=[SparseValueSlot(SPARSE_ID_LIMIT), DenseSlot(SPARSE_ID_LIMIT),
                 SparseNonValueSlot(SPARSE_ID_LIMIT), IndexSlot(SPARSE_ID_LIMIT),
                 StringSlot(SPARSE_ID_LIMIT)],
          use_seq=True, init_hook=processSubSeqAndGenerateDataInit)
def processSubSeqAndGenerateData(obj, name):
    """Sub-sequence variant: dump the flat sample to JSON but yield each
    slot wrapped one level deeper, forming a single sub-sequence."""
    retv_json = [sparse_value, dense, sparse_nonvalue, ids, strs]
    retv_wrapper = [[sparse_value], [dense], [sparse_nonvalue], [ids], [strs]]
    # Write to protoseq.
    with open(obj.json_filename, "w") as f:
        json.dump(retv_json, f)
    yield retv_wrapper
if __name__ == "__main__":
pvd = processNonSequenceData("test.txt")
print pvd.getNextBatch(100)
pvd = processSeqAndGenerateData("_")
print pvd.getNextBatch(100)
pvd = processSubSeqAndGenerateData("_")
print pvd.getNextBatch(1)
| 36.642857
| 105
| 0.700455
|
1107fe72dc239ce96485354cd4e90e18a992d66b
| 17,838
|
py
|
Python
|
thirdpart/wsgidav/wsgidav_app.py
|
saukrIppl/newsea
|
0fd5ab2ade9a8fb16b1e7b43ba13dac32eb39603
|
[
"Apache-2.0"
] | 2
|
2017-06-21T09:46:55.000Z
|
2018-05-30T10:07:32.000Z
|
thirdpart/wsgidav/wsgidav_app.py
|
saukrIppl/newsea
|
0fd5ab2ade9a8fb16b1e7b43ba13dac32eb39603
|
[
"Apache-2.0"
] | null | null | null |
thirdpart/wsgidav/wsgidav_app.py
|
saukrIppl/newsea
|
0fd5ab2ade9a8fb16b1e7b43ba13dac32eb39603
|
[
"Apache-2.0"
] | 1
|
2020-10-01T04:11:41.000Z
|
2020-10-01T04:11:41.000Z
|
# (c) 2009-2014 Martin Wendt and contributors; see WsgiDAV https://github.com/mar10/wsgidav
# Original PyFileServer (c) 2005 Ho Chun Wei.
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
"""
WSGI container, that handles the HTTP requests. This object is passed to the
WSGI server and represents our WsgiDAV application to the outside.
On init:
Use the configuration dictionary to initialize lock manager, property manager,
domain controller.
Create a dictionary of share-to-provider mappings.
Initialize middleware objects and RequestResolver and setup the WSGI
application stack.
For every request:
Find the registered DAV provider for the current request.
Add or modify info in the WSGI ``environ``:
environ["SCRIPT_NAME"]
Mount-point of the current share.
environ["PATH_INFO"]
Resource path, relative to the mount path.
environ["wsgidav.provider"]
DAVProvider object that is registered for handling the current
request.
environ["wsgidav.config"]
Configuration dictionary.
environ["wsgidav.verbose"]
Debug level [0-3].
Log the HTTP request, then pass the request to the first middleware.
Note: The OPTIONS method for the '*' path is handled directly.
See `Developers info`_ for more information about the WsgiDAV architecture.
.. _`Developers info`: http://wsgidav.readthedocs.org/en/latest/develop.html
"""
from fs_dav_provider import FilesystemProvider
from wsgidav.dir_browser import WsgiDavDirBrowser
from wsgidav.dav_provider import DAVProvider
from wsgidav.lock_storage import LockStorageDict
import time
import sys
import threading
import urllib
import util
from error_printer import ErrorPrinter
from debug_filter import WsgiDavDebugFilter
from http_authenticator import HTTPAuthenticator
from request_resolver import RequestResolver
from property_manager import PropertyManager
from lock_manager import LockManager
#from wsgidav.version import __version__
__docformat__ = "reStructuredText"
# Use these settings, if config file does not define them (or is totally missing)
DEFAULT_CONFIG = {
    "mount_path": None,  # Application root, e.g. <mount_path>/<share_name>/<res_path>
    "provider_mapping": {},
    "host": "localhost",
    "port": 8080,
    "ext_servers": [
        # "paste",
        # "cherrypy",
        # "wsgiref",
        "cherrypy-bundled",
        "wsgidav",
    ],
    "add_header_MS_Author_Via": True,
    "unquote_path_info": False,  # See #8
    # "use_text_files": False,
    "propsmanager": None,  # True: use property_manager.PropertyManager
    "locksmanager": True,  # True: use lock_manager.LockManager

    # HTTP Authentication Options
    "user_mapping": {},  # dictionary of dictionaries
    "domaincontroller": None,  # None: domain_controller.WsgiDAVDomainController(user_mapping)
    "acceptbasic": True,  # Allow basic authentication, True or False
    "acceptdigest": True,  # Allow digest authentication, True or False
    "defaultdigest": True,  # True (default digest) or False (default basic)

    # Error printer options
    "catchall": False,

    "enable_loggers": [
    ],

    # Verbose Output
    "verbose": 1,  # 0 - no output (excepting application exceptions)
                   # 1 - show single line request summaries (for HTTP logging)
                   # 2 - show additional events
                   # 3 - show full request/response header info (HTTP Logging)
                   #     request body and GET response bodies not shown

    "dir_browser": {
        "enable": True,  # Render HTML listing for GET requests on collections
        "response_trailer": "",  # Raw HTML code, appended as footer
        "davmount": False,  # Send <dm:mount> response if request URL contains '?davmount'
        "ms_mount": False,  # Add an 'open as webfolder' link (requires Windows)
        "ms_sharepoint_plugin": True,  # Invoke MS Office documents for editing using WebDAV
        "ms_sharepoint_urls": False,  # Prepend 'ms-word:ofe|u|' to URL for MS Office documents
    },
    "middleware_stack": [
        WsgiDavDirBrowser,
        HTTPAuthenticator,
        ErrorPrinter,
        WsgiDavDebugFilter,
    ]
}
def _checkConfig(config):
mandatoryFields = ["provider_mapping",
]
for field in mandatoryFields:
if not field in config:
raise ValueError("Invalid configuration: missing required field '%s'" % field)
#===============================================================================
# WsgiDAVApp
#===============================================================================
class WsgiDAVApp(object):
    """Top-level WSGI application: maps shares to DAV providers and builds
    the middleware stack (Python 2 code: print statements, basestring)."""

    def __init__(self, config):
        self.config = config

        util.initLogging(config["verbose"],
                         config.get("log_path", ""),
                         config.get("enable_loggers", []))
        util.log("Default encoding: %s (file system: %s)" % (sys.getdefaultencoding(), sys.getfilesystemencoding()))

        # Evaluate configuration and set defaults
        _checkConfig(config)
        provider_mapping = self.config["provider_mapping"]
        # response_trailer = config.get("response_trailer", "")
        self._verbose = config.get("verbose", 2)

        # "locksmanager": True means "use the default dict-backed storage";
        # any other truthy value is used as the storage object itself.
        lockStorage = config.get("locksmanager")
        if lockStorage is True:
            lockStorage = LockStorageDict()
        if not lockStorage:
            locksManager = None
        else:
            locksManager = LockManager(lockStorage)

        propsManager = config.get("propsmanager")
        if not propsManager:
            # Normalize False, 0 to None
            propsManager = None
        elif propsManager is True:
            propsManager = PropertyManager()

        mount_path = config.get("mount_path")

        # Instantiate DAV resource provider objects for every share
        self.providerMap = {}
        for (share, provider) in provider_mapping.items():
            # Make sure share starts with, or is, '/'
            share = "/" + share.strip("/")
            # We allow a simple string as 'provider'. In this case we interpret
            # it as a file system root folder that is published.
            if isinstance(provider, basestring):
                provider = FilesystemProvider(provider)
            assert isinstance(provider, DAVProvider)
            provider.setSharePath(share)
            if mount_path:
                provider.setMountPath(mount_path)
            # TODO: someday we may want to configure different lock/prop managers per provider
            provider.setLockManager(locksManager)
            provider.setPropManager(propsManager)
            self.providerMap[share] = {"provider": provider, "allow_anonymous": False}

        # Define WSGI application stack
        application = RequestResolver()
        domain_controller = None
        dir_browser = config.get("dir_browser", {})
        middleware_stack = config.get("middleware_stack", [])

        # Replace WsgiDavDirBrowser to custom class for backward compatibility only
        # In normal way you should insert it into middleware_stack
        if dir_browser.get("enable", True) and "app_class" in dir_browser.keys():
            config["middleware_stack"] = [m if m != WsgiDavDirBrowser else dir_browser['app_class'] for m in middleware_stack]

        # Wrap the resolver in each suitable middleware, innermost first.
        for mw in middleware_stack:
            if mw.isSuitable(config):
                if self._verbose >= 2:
                    print "Middleware %s is suitable" % mw
                application = mw(application, config)
                if issubclass(mw, HTTPAuthenticator):
                    domain_controller = application.getDomainController()
                    # check anonymous access
                    for share, data in self.providerMap.items():
                        if application.allowAnonymousAccess(share):
                            data['allow_anonymous'] = True
            else:
                if self._verbose >= 2:
                    print "Middleware %s is not suitable" % mw

        # Print info
        if self._verbose >= 2:
            print "Using lock manager: %r" % locksManager
            print "Using property manager: %r" % propsManager
            print "Using domain controller: %s" % domain_controller
            print "Registered DAV providers:"
            for share, data in self.providerMap.items():
                hint = " (anonymous)" if data['allow_anonymous'] else ""
                # NOTE(review): 'provider' here is the leftover loop variable
                # from the registration loop above, so every line prints the
                # last provider — looks like it should be data['provider'];
                # confirm before changing.
                print "  Share '%s': %s%s" % (share, provider, hint)
        if self._verbose >= 1:
            for share, data in self.providerMap.items():
                if data['allow_anonymous']:
                    # TODO: we should only warn here, if --no-auth is not given
                    print "WARNING: share '%s' will allow anonymous access." % share

        self._application = application
    def __call__(self, environ, start_response):
        """WSGI entry point (generator): resolve the share for the request
        path, annotate environ, and delegate to the middleware stack."""
        # util.log("SCRIPT_NAME='%s', PATH_INFO='%s'" % (environ.get("SCRIPT_NAME"), environ.get("PATH_INFO")))

        # We optionall unquote PATH_INFO here, although this should already be
        # done by the server (#8).
        path = environ["PATH_INFO"]
        if self.config.get("unquote_path_info", False):
            path = urllib.unquote(environ["PATH_INFO"])

        # GC issue 22: Pylons sends root as u'/'
        if isinstance(path, unicode):
            util.log("Got unicode PATH_INFO: %r" % path)
            path = path.encode("utf8")

        # Always adding these values to environ:
        environ["wsgidav.config"] = self.config
        environ["wsgidav.provider"] = None
        environ["wsgidav.verbose"] = self._verbose

        ## Find DAV provider that matches the share
        # sorting share list by reverse length — longest share prefix wins
        shareList = self.providerMap.keys()
        shareList.sort(key=len, reverse=True)

        share = None
        for r in shareList:
            # @@: Case sensitivity should be an option of some sort here;
            #     os.path.normpath might give the preferred case for a filename.
            if r == "/":
                share = r
                break
            elif path.upper() == r.upper() or path.upper().startswith(r.upper()+"/"):
                share = r
                break

        share_data = self.providerMap.get(share)
        # Note: we call the next app, even if provider is None, because OPTIONS
        #       must still be handled.
        #       All other requests will result in '404 Not Found'
        environ["wsgidav.provider"] = share_data['provider']
        # TODO: test with multi-level realms: 'aa/bb'
        # TODO: test security: url contains '..'

        # Transform SCRIPT_NAME and PATH_INFO
        # (Since path and share are unquoted, this also fixes quoted values.)
        if share == "/" or not share:
            environ["PATH_INFO"] = path
        else:
            environ["SCRIPT_NAME"] += share
            environ["PATH_INFO"] = path[len(share):]
        # util.log("--> SCRIPT_NAME='%s', PATH_INFO='%s'" % (environ.get("SCRIPT_NAME"), environ.get("PATH_INFO")))

        assert isinstance(path, str)
        # See http://mail.python.org/pipermail/web-sig/2007-January/002475.html
        # for some clarification about SCRIPT_NAME/PATH_INFO format
        # SCRIPT_NAME starts with '/' or is empty
        assert environ["SCRIPT_NAME"] == "" or environ["SCRIPT_NAME"].startswith("/")
        # SCRIPT_NAME must not have a trailing '/'
        assert environ["SCRIPT_NAME"] in ("", "/") or not environ["SCRIPT_NAME"].endswith("/")
        # PATH_INFO starts with '/'
        assert environ["PATH_INFO"] == "" or environ["PATH_INFO"].startswith("/")

        start_time = time.time()

        def _start_response_wrapper(status, response_headers, exc_info=None):
            # Wraps start_response to post-process headers, decide whether the
            # connection must be closed, and log a one-line request summary.

            # Postprocess response headers
            headerDict = {}
            for header, value in response_headers:
                if header.lower() in headerDict:
                    util.warn("Duplicate header in response: %s" % header)
                headerDict[header.lower()] = value

            # Check if we should close the connection after this request.
            # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.4
            forceCloseConnection = False
            currentContentLength = headerDict.get("content-length")
            statusCode = int(status.split(" ", 1)[0])
            contentLengthRequired = (environ["REQUEST_METHOD"] != "HEAD"
                                     and statusCode >= 200
                                     and not statusCode in (204, 304))
            # print environ["REQUEST_METHOD"], statusCode, contentLengthRequired
            if contentLengthRequired and currentContentLength in (None, ""):
                # A typical case: a GET request on a virtual resource, for which
                # the provider doesn't know the length
                util.warn("Missing required Content-Length header in %s-response: closing connection" % statusCode)
                forceCloseConnection = True
            elif not type(currentContentLength) is str:
                util.warn("Invalid Content-Length header in response (%r): closing connection" % headerDict.get("content-length"))
                forceCloseConnection = True

            # HOTFIX for Vista and Windows 7 (GC issue 13, issue 23)
            # It seems that we must read *all* of the request body, otherwise
            # clients may miss the response.
            # For example Vista MiniRedir didn't understand a 401 response,
            # when trying an anonymous PUT of big files. As a consequence, it
            # doesn't retry with credentials and the file copy fails.
            # (XP is fine however).
            util.readAndDiscardInput(environ)

            # Make sure the socket is not reused, unless we are 100% sure all
            # current input was consumed
            if(util.getContentLength(environ) != 0
               and not environ.get("wsgidav.all_input_read")):
                util.warn("Input stream not completely consumed: closing connection")
                forceCloseConnection = True

            if forceCloseConnection and headerDict.get("connection") != "close":
                util.warn("Adding 'Connection: close' header")
                response_headers.append(("Connection", "close"))

            # Log request
            if self._verbose >= 1:
                userInfo = environ.get("http_authenticator.username")
                if not userInfo:
                    userInfo = "(anonymous)"
                threadInfo = ""
                if self._verbose >= 1:
                    threadInfo = "<%s> " % threading._get_ident()
                # NOTE(review): threadInfo is computed but never included in
                # the log line below — confirm whether it should be.
                extra = []
                if "HTTP_DESTINATION" in environ:
                    extra.append('dest="%s"' % environ.get("HTTP_DESTINATION"))
                if environ.get("CONTENT_LENGTH", "") != "":
                    extra.append("length=%s" % environ.get("CONTENT_LENGTH"))
                if "HTTP_DEPTH" in environ:
                    extra.append("depth=%s" % environ.get("HTTP_DEPTH"))
                if "HTTP_RANGE" in environ:
                    extra.append("range=%s" % environ.get("HTTP_RANGE"))
                if "HTTP_OVERWRITE" in environ:
                    extra.append("overwrite=%s" % environ.get("HTTP_OVERWRITE"))
                if self._verbose >= 1 and "HTTP_EXPECT" in environ:
                    extra.append('expect="%s"' % environ.get("HTTP_EXPECT"))
                if self._verbose >= 2 and "HTTP_CONNECTION" in environ:
                    extra.append('connection="%s"' % environ.get("HTTP_CONNECTION"))
                if self._verbose >= 2 and "HTTP_USER_AGENT" in environ:
                    extra.append('agent="%s"' % environ.get("HTTP_USER_AGENT"))
                if self._verbose >= 2 and "HTTP_TRANSFER_ENCODING" in environ:
                    extra.append('transfer-enc=%s' % environ.get("HTTP_TRANSFER_ENCODING"))
                if self._verbose >= 1:
                    extra.append('elap=%.3fsec' % (time.time() - start_time))
                extra = ", ".join(extra)
                util.log('%s - %s - "%s" %s -> %s' % (
                    environ.get("REMOTE_ADDR",""),
                    userInfo,
                    environ.get("REQUEST_METHOD") + " " + environ.get("PATH_INFO", ""),
                    extra,
                    status
                ))

            return start_response(status, response_headers, exc_info)

        # Call next middleware
        app_iter = self._application(environ, _start_response_wrapper)
        for v in app_iter:
            yield v
        if hasattr(app_iter, "close"):
            app_iter.close()

        return
| 44.263027
| 131
| 0.56531
|
416ec46bedc43ac213a4e09e41649cff261f23db
| 4,107
|
py
|
Python
|
Gena/basic_image.py
|
mllzl/earthengine-py-notebooks
|
cade6a81dd4dbbfb1b9b37aaf6955de42226cfc5
|
[
"MIT"
] | 1
|
2020-03-26T04:21:15.000Z
|
2020-03-26T04:21:15.000Z
|
Gena/basic_image.py
|
mllzl/earthengine-py-notebooks
|
cade6a81dd4dbbfb1b9b37aaf6955de42226cfc5
|
[
"MIT"
] | null | null | null |
Gena/basic_image.py
|
mllzl/earthengine-py-notebooks
|
cade6a81dd4dbbfb1b9b37aaf6955de42226cfc5
|
[
"MIT"
] | null | null | null |
# %%
"""
<table class="ee-notebook-buttons" align="left">
<td><a target="_blank" href="https://github.com/giswqs/earthengine-py-notebooks/tree/master/Gena/basic_image.ipynb"><img width=32px src="https://www.tensorflow.org/images/GitHub-Mark-32px.png" /> View source on GitHub</a></td>
<td><a target="_blank" href="https://nbviewer.jupyter.org/github/giswqs/earthengine-py-notebooks/blob/master/Gena/basic_image.ipynb"><img width=26px src="https://upload.wikimedia.org/wikipedia/commons/thumb/3/38/Jupyter_logo.svg/883px-Jupyter_logo.svg.png" />Notebook Viewer</a></td>
<td><a target="_blank" href="https://mybinder.org/v2/gh/giswqs/earthengine-py-notebooks/master?filepath=Gena/basic_image.ipynb"><img width=58px src="https://mybinder.org/static/images/logo_social.png" />Run in binder</a></td>
<td><a target="_blank" href="https://colab.research.google.com/github/giswqs/earthengine-py-notebooks/blob/master/Gena/basic_image.ipynb"><img src="https://www.tensorflow.org/images/colab_logo_32px.png" /> Run in Google Colab</a></td>
</table>
"""
# %%
"""
## Install Earth Engine API and geemap
Install the [Earth Engine Python API](https://developers.google.com/earth-engine/python_install) and [geemap](https://github.com/giswqs/geemap). The **geemap** Python package is built upon the [ipyleaflet](https://github.com/jupyter-widgets/ipyleaflet) and [folium](https://github.com/python-visualization/folium) packages and implements several methods for interacting with Earth Engine data layers, such as `Map.addLayer()`, `Map.setCenter()`, and `Map.centerObject()`.
The following script checks if the geemap package has been installed. If not, it will install geemap, which automatically installs its [dependencies](https://github.com/giswqs/geemap#dependencies), including earthengine-api, folium, and ipyleaflet.
**Important note**: A key difference between folium and ipyleaflet is that ipyleaflet is built upon ipywidgets and allows bidirectional communication between the front-end and the backend enabling the use of the map to capture user input, while folium is meant for displaying static data only ([source](https://blog.jupyter.org/interactive-gis-in-jupyter-with-ipyleaflet-52f9657fa7a)). Note that [Google Colab](https://colab.research.google.com/) currently does not support ipyleaflet ([source](https://github.com/googlecolab/colabtools/issues/60#issuecomment-596225619)). Therefore, if you are using geemap with Google Colab, you should use [`import geemap.eefolium`](https://github.com/giswqs/geemap/blob/master/geemap/eefolium.py). If you are using geemap with [binder](https://mybinder.org/) or a local Jupyter notebook server, you can use [`import geemap`](https://github.com/giswqs/geemap/blob/master/geemap/geemap.py), which provides more functionalities for capturing user input (e.g., mouse-clicking and moving).
"""
# %%
# Installs geemap package
import subprocess

try:
    import geemap
except ImportError:
    # Install on demand so the notebook works in a fresh environment.
    print('geemap package not installed. Installing ...')
    subprocess.check_call(["python", '-m', 'pip', 'install', 'geemap'])

# Checks whether this notebook is running on Google Colab.
# Was a bare `except:` — narrowed to ImportError so that only a missing
# google.colab module selects the non-Colab backend (a bare except would
# also swallow KeyboardInterrupt/SystemExit).
try:
    import google.colab
    import geemap.eefolium as emap
except ImportError:
    import geemap as emap

# Authenticates and initializes Earth Engine
import ee

try:
    ee.Initialize()
except Exception:
    # No cached credentials yet: run the interactive auth flow, then retry.
    ee.Authenticate()
    ee.Initialize()
# %%
"""
## Create an interactive map
The default basemap is `Google Satellite`. [Additional basemaps](https://github.com/giswqs/geemap/blob/master/geemap/geemap.py#L13) can be added using the `Map.add_basemap()` function.
"""
# %%
# Create the interactive map centered on the continental US at zoom level 4.
Map = emap.Map(center=[40,-100], zoom=4)
Map.add_basemap('ROADMAP') # Add Google Map
Map
# %%
"""
## Add Earth Engine Python script
"""
# %%
# Add Earth Engine dataset
# Per-pixel longitude/latitude image, shifted and scaled so both bands lie in [0, 1].
image = ee.Image.pixelLonLat() \
    .add([180, 90]).divide([360, 180])

# image = image.multiply(50).sin()

Map.setCenter(0, 28, 2.5)
Map.addLayer(image, {}, 'coords', True)
# %%
"""
## Display Earth Engine data layers
"""
# %%
# Render the map with a layer control (only the folium backend needs the explicit call).
Map.addLayerControl() # This line is not needed for ipyleaflet-based Map.
Map
| 51.3375
| 1,021
| 0.741904
|
4dec35474eeaa2bc1224bd401c81a2370cfd3952
| 1,848
|
py
|
Python
|
ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/sqoop_client.py
|
kuhella/ambari
|
9396c17b0305665d31d7a4f4525be857958b5d4c
|
[
"Apache-2.0"
] | null | null | null |
ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/sqoop_client.py
|
kuhella/ambari
|
9396c17b0305665d31d7a4f4525be857958b5d4c
|
[
"Apache-2.0"
] | null | null | null |
ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/sqoop_client.py
|
kuhella/ambari
|
9396c17b0305665d31d7a4f4525be857958b5d4c
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
from resource_management import *
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions.version import compare_versions, format_stack_version
from sqoop import sqoop
class SqoopClient(Script):
  """Ambari lifecycle handler for the Sqoop client component."""

  def get_component_name(self):
    # Identifier consumed by the stack/conf selection tooling.
    return "sqoop-client"

  def pre_rolling_restart(self, env):
    """Point conf/stack symlinks at the target version during a rolling upgrade."""
    import params
    env.set_params(params)

    target_version = params.version
    if target_version and compare_versions(format_stack_version(target_version), '4.0.0.0') >= 0:
      conf_select.select(params.stack_name, "sqoop", target_version)
      stack_select.select("sqoop-client", target_version)
      #Execute(format("iop-select set sqoop-client {version}"))

  def install(self, env):
    """Install the Sqoop packages, then write the client configuration."""
    self.install_packages(env)
    self.configure(env)

  def configure(self, env):
    """Render Sqoop client configuration files."""
    import params
    env.set_params(params)
    sqoop(type='client')

  def status(self, env):
    # Client components run no daemon, so there is no status to report.
    raise ClientComponentHasNoStatus()


if __name__ == "__main__":
  SqoopClient().execute()
| 31.862069
| 98
| 0.770022
|
57e5209234324ff0c35c29950b895839171621f9
| 4,959
|
py
|
Python
|
examples/flax_mnist.py
|
tachukao/jaxopt
|
dae2f66a2e5899ade8032a2dd13609acd371d4de
|
[
"Apache-2.0"
] | null | null | null |
examples/flax_mnist.py
|
tachukao/jaxopt
|
dae2f66a2e5899ade8032a2dd13609acd371d4de
|
[
"Apache-2.0"
] | null | null | null |
examples/flax_mnist.py
|
tachukao/jaxopt
|
dae2f66a2e5899ade8032a2dd13609acd371d4de
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
MNIST example with Flax and JAXopt.
===================================
"""
from absl import app
from absl import flags
from flax import linen as nn
import jax
import jax.numpy as jnp
from jaxopt import loss
from jaxopt import OptaxSolver
from jaxopt import PolyakSGD
from jaxopt import tree_util
import optax
import tensorflow_datasets as tfds
flags.DEFINE_float("l2reg", 1e-4, "L2 regularization.")
flags.DEFINE_float("learning_rate", 0.001, "Learning rate (used in adam).")
flags.DEFINE_bool("manual_loop", False, "Whether to use a manual training loop.")
flags.DEFINE_integer("maxiter", 100, "Maximum number of iterations.")
flags.DEFINE_float("max_step_size", 0.1, "Maximum step size (used in polyak-sgd).")
flags.DEFINE_float("momentum", 0.9, "Momentum strength (used in adam, polyak-sgd).")
flags.DEFINE_enum("solver", "adam", ["adam", "sgd", "polyak-sgd"], "Solver to use.")
FLAGS = flags.FLAGS
def load_dataset(split, *, is_training, batch_size):
  """Return an infinite iterator over numpy batches of the requested MNIST split."""
  dataset = tfds.load("mnist:3.*.*", split=split).cache().repeat()
  if is_training:
    # Shuffle buffer of 10 batches; fixed seed keeps runs reproducible.
    dataset = dataset.shuffle(10 * batch_size, seed=0)
  return iter(tfds.as_numpy(dataset.batch(batch_size)))
class CNN(nn.Module):
  """Small convolutional network producing 10 class logits for MNIST."""

  @nn.compact
  def __call__(self, x):
    # Two conv -> relu -> average-pool stages.
    h = nn.relu(nn.Conv(features=32, kernel_size=(3, 3))(x))
    h = nn.avg_pool(h, window_shape=(2, 2), strides=(2, 2))
    h = nn.relu(nn.Conv(features=64, kernel_size=(3, 3))(h))
    h = nn.avg_pool(h, window_shape=(2, 2), strides=(2, 2))
    # Flatten the spatial dimensions, then two dense layers down to 10 logits.
    h = h.reshape((h.shape[0], -1))
    h = nn.relu(nn.Dense(features=256)(h))
    return nn.Dense(features=10)(h)
net = CNN()
@jax.jit
def accuracy(params, data):
  """Fraction of examples in `data` whose argmax logit matches the label."""
  inputs = data["image"].astype(jnp.float32) / 255.
  predictions = jnp.argmax(net.apply({"params": params}, inputs), axis=-1)
  return jnp.mean(predictions == data["label"])
logistic_loss = jax.vmap(loss.multiclass_logistic_loss)
def loss_fun(params, l2reg, data):
  """Mean multiclass logistic loss on `data` plus 0.5 * l2reg * ||params||^2."""
  inputs = data["image"].astype(jnp.float32) / 255.
  logits = net.apply({"params": params}, inputs)
  data_loss = jnp.mean(logistic_loss(data["label"], logits))
  # Squared L2 norm over the whole parameter pytree.
  penalty = 0.5 * l2reg * tree_util.tree_l2_norm(params, squared=True)
  return data_loss + penalty
def main(argv):
  """Train the CNN on MNIST with the solver selected via --solver."""
  del argv

  train_ds = load_dataset("train", is_training=True, batch_size=1000)
  test_ds = load_dataset("test", is_training=False, batch_size=10000)

  def pre_update(params, state, *args, **kwargs):
    # Periodically evaluate classification accuracy on the test set.
    if state.iter_num % 10 == 0:
      test_accuracy = jax.device_get(accuracy(params, next(test_ds)))
      print(f"[Step {state.iter_num}] Test accuracy: {test_accuracy:.3f}.")
    return params, state

  # Initialize solver and parameters.
  if FLAGS.solver == "adam":
    # Honor --learning_rate (previously hard-coded to 1e-3, which silently
    # ignored the flag; the flag's default of 0.001 preserves old behavior).
    solver = OptaxSolver(opt=optax.adam(FLAGS.learning_rate), fun=loss_fun,
                         maxiter=FLAGS.maxiter, pre_update=pre_update)

  elif FLAGS.solver == "sgd":
    opt = optax.sgd(FLAGS.learning_rate, FLAGS.momentum)
    solver = OptaxSolver(opt=opt, fun=loss_fun,
                         maxiter=FLAGS.maxiter, pre_update=pre_update)

  elif FLAGS.solver == "polyak-sgd":
    solver = PolyakSGD(fun=loss_fun, maxiter=FLAGS.maxiter,
                       momentum=FLAGS.momentum,
                       max_step_size=FLAGS.max_step_size,
                       pre_update=pre_update)

  else:
    raise ValueError("Unknown solver: %s" % FLAGS.solver)

  # Fixed PRNG seed for reproducible initialization; input is one dummy
  # 28x28x1 image to trace parameter shapes.
  rng = jax.random.PRNGKey(0)
  init_params = CNN().init(rng, jnp.ones([1, 28, 28, 1]))["params"]

  # Run training loop.
  # In JAXopt, stochastic solvers can be run either using a manual for loop or
  # using `run_iterator`. We include both here for demonstration purpose.
  if FLAGS.manual_loop:
    params, state = solver.init(init_params)

    for _ in range(FLAGS.maxiter):
      params, state = pre_update(params=params, state=state)
      params, state = solver.update(params=params, state=state,
                                    l2reg=FLAGS.l2reg,
                                    data=next(train_ds))

  else:
    solver.run_iterator(init_params=init_params,
                        iterator=train_ds,
                        l2reg=FLAGS.l2reg)


if __name__ == "__main__":
  app.run(main)
| 31.993548
| 84
| 0.666062
|
9f5fb1854328e658f9b083d3e0d313416295726b
| 3,179
|
py
|
Python
|
Vision/owlbot.py
|
nick-lai/google-cloud-php
|
52130ee60f03c61ef0ada04c31b1268af87bacb6
|
[
"Apache-2.0"
] | 524
|
2018-09-26T02:29:52.000Z
|
2022-03-30T12:57:26.000Z
|
Vision/owlbot.py
|
nick-lai/google-cloud-php
|
52130ee60f03c61ef0ada04c31b1268af87bacb6
|
[
"Apache-2.0"
] | 2,028
|
2018-09-20T22:35:42.000Z
|
2022-03-31T18:13:07.000Z
|
Vision/owlbot.py
|
nick-lai/google-cloud-php
|
52130ee60f03c61ef0ada04c31b1268af87bacb6
|
[
"Apache-2.0"
] | 262
|
2018-10-02T15:43:46.000Z
|
2022-03-29T19:37:04.000Z
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""
import logging
from pathlib import Path
import synthtool as s
import subprocess
from synthtool.languages import php
from synthtool import _tracked_paths
logging.basicConfig(level=logging.DEBUG)
src = Path(f"../{php.STAGING_DIR}/Vision").resolve()
dest = Path().resolve()
# Added so that we can pass copy_excludes in the owlbot_main() call
_tracked_paths.add(src)
# Exclude gapic_metadata.json and partial veneer files.
php.owlbot_main(
src=src,
dest=dest,
copy_excludes=[
src / "*/src/*/gapic_metadata.json",
src / "*/src/*/*.php"
]
)
# document and utilize apiEndpoint instead of serviceAddress
s.replace(
"**/Gapic/*GapicClient.php",
r"'serviceAddress' =>",
r"'apiEndpoint' =>")
s.replace(
"**/Gapic/*GapicClient.php",
r"@type string \$serviceAddress\n\s+\*\s+The address",
r"""@type string $serviceAddress
* **Deprecated**. This option will be removed in a future major release. Please
* utilize the `$apiEndpoint` option instead.
* @type string $apiEndpoint
* The address""")
s.replace(
"**/Gapic/*GapicClient.php",
r"\$transportConfig, and any \$serviceAddress",
r"$transportConfig, and any `$apiEndpoint`")
# V1 is GA, so remove @experimental tags
s.replace(
'src/V1/**/*Client.php',
r'^(\s+\*\n)?\s+\*\s@experimental\n',
'')
# Change the wording for the deprecation warning.
s.replace(
'src/*/*_*.php',
r'will be removed in the next major release',
'will be removed in a future release')
### [START] protoc backwards compatibility fixes
# roll back to private properties.
s.replace(
"src/**/V*/**/*.php",
r"Generated from protobuf field ([^\n]{0,})\n\s{5}\*/\n\s{4}protected \$",
r"""Generated from protobuf field \1
*/
private $""")
# prevent proto messages from being marked final
s.replace(
"src/**/V*/**/*.php",
r"final class",
r"class")
# Replace "Unwrapped" with "Value" for method names.
s.replace(
"src/**/V*/**/*.php",
r"public function ([s|g]\w{3,})Unwrapped",
r"public function \1Value"
)
### [END] protoc backwards compatibility fixes
# fix relative cloud.google.com links
s.replace(
"src/**/V*/**/*.php",
r"(.{0,})\]\((/.{0,})\)",
r"\1](https://cloud.google.com\2)"
)
# format generated clients
subprocess.run([
'npx',
'-y',
'-p',
'@prettier/plugin-php@^0.16',
'prettier',
'**/Gapic/*',
'--write',
'--parser=php',
'--single-quote',
'--print-width=80'])
| 27.405172
| 94
| 0.647688
|
cba2b2a490b2c4ee3e26343c2c566b7482092d71
| 576
|
py
|
Python
|
login.py
|
Entropy03/linyi
|
1e5f924c217095d6757e29cac128e5ac5085ec11
|
[
"MIT"
] | null | null | null |
login.py
|
Entropy03/linyi
|
1e5f924c217095d6757e29cac128e5ac5085ec11
|
[
"MIT"
] | null | null | null |
login.py
|
Entropy03/linyi
|
1e5f924c217095d6757e29cac128e5ac5085ec11
|
[
"MIT"
] | null | null | null |
import ssl
import sys
import urllib2
import random
import httplib
import json
from cookielib import LWPCookieJar
import urllib
import re
import getpass
class lgoin :
    def __init__(self) :
        """Initialize login state and install a cookie-aware urllib2 opener."""
        self.logindata = {}
        # SECURITY: hard-coded account credentials committed to source control.
        # Move these to a config file / environment variable and rotate the
        # exposed password.
        self.username ='china199123@163.com'
        self.password ='ZAQjay12306'
        self.randcode = ''
        # Keep cookies updated automatically across subsequent HTTP requests.
        self.cookiejar = LWPCookieJar()
        cookiesupport = urllib2.HTTPCookieProcessor(self.cookiejar)
        opener = urllib2.build_opener(cookiesupport, urllib2.HTTPHandler)
        urllib2.install_opener(opener)
def getrandcode(self):
| 23.04
| 73
| 0.743056
|
b21abd96d0480f750f5b0089725872ee5225a01f
| 4,430
|
py
|
Python
|
blastsight/view/drawables/gldrawable.py
|
gsanhueza/BlastSight
|
4b5c48e7ea5f67b737429f05d5213e9ff1fd399d
|
[
"MIT"
] | null | null | null |
blastsight/view/drawables/gldrawable.py
|
gsanhueza/BlastSight
|
4b5c48e7ea5f67b737429f05d5213e9ff1fd399d
|
[
"MIT"
] | 1
|
2022-03-13T17:35:35.000Z
|
2022-03-13T17:35:35.000Z
|
blastsight/view/drawables/gldrawable.py
|
gsanhueza/BlastSight
|
4b5c48e7ea5f67b737429f05d5213e9ff1fd399d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# Copyright (c) 2019-2021 Gabriel Sanhueza.
#
# Distributed under the MIT License.
# See LICENSE for more info.
import numpy as np
from OpenGL.GL import *
class GLDrawable:
    """Base class pairing a model element with its OpenGL buffer state (VAOs/VBOs).

    Attribute reads and writes that the drawable does not handle itself fall
    through to the wrapped element (a shortened Delegator pattern; see the
    note below the constructor).
    """

    def __init__(self, element, *args, **kwargs):
        # assert element
        # Bypass our overridden __setattr__ so 'element' is stored on the
        # drawable itself instead of being delegated.
        super().__setattr__('element', element)  # self.element = element
        self._vaos = []
        self._vbos = []
        self._observers = []

        # Rendering flags, overridable via kwargs.
        self._is_initialized = kwargs.pop('initialized', False)
        self._is_visible = kwargs.pop('visible', True)
        self._is_boostable = kwargs.pop('turbo', False)
        self._is_cross_sectioned = kwargs.pop('cross_section', False)

    # Note: The following "hacks" are shortened versions of Delegator Pattern.
    # They're convenient, but optional.
    #
    # Example:
    # d = GLDrawable(element, *args, **kwargs)
    # assert d.alpha is d.element.alpha => True

    def __dir__(self) -> list:
        # Hack to expose GLDrawable's attributes AND self.element's attributes
        # as if they were GLDrawable's attributes.
        # https://stackoverflow.com/q/15507848
        return list(set(super().__dir__() + dir(self.element)))

    def __getattribute__(self, attr: str) -> any:
        # Hack to get our attributes.
        # If not found, search self.element's attributes.
        # https://stackoverflow.com/a/2405617
        if hasattr(type(self), attr) or attr in super().__getattribute__('__dict__'):
            return super().__getattribute__(attr)
        return super().__getattribute__('element').__getattribute__(attr)

    def __setattr__(self, key, value) -> None:
        # Hack to set our attributes.
        # We'll try to set our element's attribute first, then ourselves.
        # https://stackoverflow.com/a/7042247
        if key in dir(self.element):
            self.element.__setattr__(key, value)
        else:
            super().__setattr__(key, value)

    @property
    def vao(self) -> int:
        # We already know that we have only one VAO.
        # But cleanup is easier if we have the VAO in a list.
        return self._vaos[-1]

    def initialize(self) -> None:
        # Idempotent: a second call is a no-op until reload() resets the flag.
        if self.is_initialized:
            return

        self.generate_buffers()
        self.setup_attributes()
        self.is_initialized = True

    def reload(self) -> None:
        # Force the buffers to be regenerated and re-bound.
        self.is_initialized = False
        self.initialize()

    def setup_attributes(self) -> None:
        # Overridden by subclasses to bind vertex attributes.
        pass

    def generate_buffers(self) -> None:
        # Overridden by subclasses to create their VAOs/VBOs.
        pass

    @staticmethod
    def fill_buffer(pointer, basesize, array, glsize, gltype, vbo):
        # Upload `array` into `vbo` and describe it as vertex attribute slot
        # `pointer` with `basesize` components of `gltype` per vertex.
        glBindBuffer(GL_ARRAY_BUFFER, vbo)
        glBufferData(GL_ARRAY_BUFFER, sizeof(glsize) * array.size, array, GL_STATIC_DRAW)
        glVertexAttribPointer(pointer, basesize, gltype, False, 0, None)
        glEnableVertexAttribArray(pointer)

    def draw(self) -> None:
        # Overridden by subclasses with the actual GL draw calls.
        pass

    def cleanup(self) -> None:
        # Free GPU buffers; only meaningful after initialization.
        if self._is_initialized:
            glDeleteBuffers(len(self._vbos), self._vbos)
            glDeleteVertexArrays(len(self._vaos), self._vaos)

    """
    Properties
    """
    @property
    def is_initialized(self) -> bool:
        return self._is_initialized

    @property
    def is_visible(self) -> bool:
        return self._is_visible

    @property
    def is_boostable(self) -> bool:
        # Backed by the 'turbo' kwarg; how boosting affects rendering is
        # decided by subclasses/viewers.
        return self._is_boostable

    @property
    def is_cross_sectioned(self) -> bool:
        return self._is_cross_sectioned

    @is_initialized.setter
    def is_initialized(self, status: bool) -> None:
        self._is_initialized = status

    @is_visible.setter
    def is_visible(self, status: bool) -> None:
        # Setters below notify observers so the view can refresh.
        self._is_visible = status
        self.notify()

    @is_boostable.setter
    def is_boostable(self, status: bool) -> None:
        self._is_boostable = status
        self.notify()

    @is_cross_sectioned.setter
    def is_cross_sectioned(self, status: bool) -> None:
        self._is_cross_sectioned = status
        self.notify()

    """
    Quick GLDrawable API
    """
    def add_observer(self, observer) -> None:
        self._observers.append(observer)

    def notify(self) -> None:
        # Ask every registered observer to recreate itself after a state change.
        for observer in self._observers:
            observer.recreate()

    def show(self) -> None:
        self.is_visible = True

    def hide(self) -> None:
        self.is_visible = False

    def toggle_visibility(self) -> None:
        self.is_visible = not self.is_visible
| 29.337748
| 89
| 0.631377
|
48d2cf4f5a8692ae6171d29853d355c5beed1073
| 2,508
|
py
|
Python
|
examples/Raspberry_Pi_Pico_4x4_Macropad_v2/code.py
|
Mindplague/pykey
|
e069ccdacd470d8d27ad1b701f333ea8e118fc49
|
[
"MIT"
] | 7
|
2021-10-13T10:18:42.000Z
|
2022-03-03T03:51:58.000Z
|
examples/Raspberry_Pi_Pico_4x4_Macropad_v2/code.py
|
Mindplague/pykey
|
e069ccdacd470d8d27ad1b701f333ea8e118fc49
|
[
"MIT"
] | 3
|
2021-10-13T00:35:04.000Z
|
2021-10-15T00:19:31.000Z
|
examples/Raspberry_Pi_Pico_4x4_Macropad_v2/code.py
|
Mindplague/pykey
|
e069ccdacd470d8d27ad1b701f333ea8e118fc49
|
[
"MIT"
] | 3
|
2021-10-13T02:55:51.000Z
|
2022-02-10T05:01:08.000Z
|
# SPDX-FileCopyrightText: 2021 Pierre Constantineau
# SPDX-License-Identifier: MIT
# Raspberry Pi Pico 4x4 Macropad
"""
Code adapted from the following sources:
MACROPAD Hotkey (https://learn.adafruit.com/macropad-hotkeys/project-code)
Pico Four Keypad (https://learn.adafruit.com/pico-four-key-macropad/code-the-four-keypad)
"""
import board
import keypad
import usb_hid
from adafruit_hid.keyboard import Keyboard
from adafruit_hid.keycode import Keycode
from adafruit_hid.keyboard_layout_us import KeyboardLayoutUS
kpd = Keyboard(usb_hid.devices)
keyboard_layout = KeyboardLayoutUS(kpd)

# define keys for 4x4 v2 — one GPIO per key, wired active-low.
keys = keypad.Keys(
    pins=(board.GP3, board.GP4, board.GP21, board.GP22,
          board.GP6, board.GP5, board.GP20, board.GP19,
          board.GP7, board.GP8, board.GP17, board.GP18,
          board.GP10, board.GP9, board.GP16, board.GP15,
          ),
    value_when_pressed=False
)

# One (label, sequence) entry per physical key. A sequence is either a list of
# Keycode ints pressed together (a negative value means "release that key"),
# or a plain string typed through the US keyboard layout.
keymap = [
    ("Select all", [Keycode.LEFT_CONTROL, Keycode.A]),
    ("Cut", [Keycode.LEFT_CONTROL, Keycode.X]),
    ("Copy", [Keycode.LEFT_CONTROL, Keycode.C]),
    ("Paste", [Keycode.LEFT_CONTROL, Keycode.V]),
    ("Hello World", "Hello World"),
    ("Cut", [Keycode.LEFT_CONTROL, Keycode.X]),
    ("Copy", [Keycode.LEFT_CONTROL, Keycode.C]),
    ("Paste", [Keycode.LEFT_CONTROL, Keycode.V]),
    ("Select all", [Keycode.LEFT_CONTROL, Keycode.A]),
    ("Cut", [Keycode.LEFT_CONTROL, Keycode.X]),
    ("Copy", [Keycode.LEFT_CONTROL, Keycode.C]),
    ("Paste", [Keycode.LEFT_CONTROL, Keycode.V]),
    ("Select all", [Keycode.LEFT_CONTROL, Keycode.A]),
    ("Cut", [Keycode.LEFT_CONTROL, Keycode.X]),
    ("Copy", [Keycode.LEFT_CONTROL, Keycode.C]),
    ("Paste", [Keycode.LEFT_CONTROL, Keycode.V]),
]

print("keymap:")
for key in keymap:
    print("\t", key[0])

# Sequence of the most recent key press, so the matching release event can
# free any still-held keys. Initialized empty: previously this name was only
# bound inside the "pressed" branch, so a stray release event arriving before
# the first press raised NameError.
sequence = []

while True:
    key_event = keys.events.get()
    if key_event:
        print(key_event)
        if key_event.pressed:
            print(keymap[key_event.key_number][0])
            sequence = keymap[key_event.key_number][1]
            for item in sequence:
                if isinstance(item, int):
                    if item >= 0:
                        kpd.press(item)
                    else:
                        kpd.release(-item)
                else:
                    keyboard_layout.write(item)
        else:
            # Release any still-pressed modifier keys
            for item in sequence:
                if isinstance(item, int) and item >= 0:
                    kpd.release(item)
| 34.356164
| 94
| 0.621212
|
5875e307d8f1064648010f07a0c213ad36c41c14
| 11,238
|
py
|
Python
|
src/datasets/utils/streaming_download_manager.py
|
jimregan/datasets
|
fc46bba66ba4f432cc10501c16a677112e13984c
|
[
"Apache-2.0"
] | null | null | null |
src/datasets/utils/streaming_download_manager.py
|
jimregan/datasets
|
fc46bba66ba4f432cc10501c16a677112e13984c
|
[
"Apache-2.0"
] | null | null | null |
src/datasets/utils/streaming_download_manager.py
|
jimregan/datasets
|
fc46bba66ba4f432cc10501c16a677112e13984c
|
[
"Apache-2.0"
] | null | null | null |
import os
import re
import time
from pathlib import Path, PurePosixPath
from typing import Optional, Tuple
import fsspec
import posixpath
from aiohttp.client_exceptions import ClientError
from .. import config
from ..filesystems import COMPRESSION_FILESYSTEMS
from .download_manager import DownloadConfig, map_nested
from .file_utils import get_authentication_headers_for_url, is_local_path, is_relative_path, url_or_path_join
from .logging import get_logger
logger = get_logger(__name__)
BASE_KNOWN_EXTENSIONS = ["txt", "csv", "json", "jsonl", "tsv", "conll", "conllu", "parquet", "pkl", "pickle", "xml"]
COMPRESSION_EXTENSION_TO_PROTOCOL = {
# single file compression
**{fs_class.extension.lstrip("."): fs_class.protocol for fs_class in COMPRESSION_FILESYSTEMS},
# archive compression
"zip": "zip",
"tar": "tar",
"tgz": "tar",
}
SINGLE_FILE_COMPRESSION_PROTOCOLS = {fs_class.protocol for fs_class in COMPRESSION_FILESYSTEMS}
SINGLE_SLASH_AFTER_PROTOCOL_PATTERN = re.compile(r"(?<!:):/")
def xjoin(a, *p):
    """Join path components onto the first hop of a (possibly chained) URL.

    Chained URLs use the special "::" separator to express nested filesystems,
    e.g. accessing a file inside a zip served over http:
    ``zip://folder1/file.txt::https://host.com/archive.zip``.
    Only the first hop is joined with ``*p``; the remaining hops are
    re-attached unchanged.

    Example::

        >>> xjoin("zip://folder1::https://host.com/archive.zip", "file.txt")
        zip://folder1/file.txt::https://host.com/archive.zip
    """
    hops = a.split("::")
    first = hops[0]
    if is_local_path(first):
        first = Path(first, *p).as_posix()
    else:
        first = posixpath.join(first, *p)
    return "::".join([first] + hops[1:])
def xdirname(a, *p):
    """Return the directory name of the first hop of a (possibly chained) URL.

    Chained URLs use the special "::" separator to express nested filesystems
    (e.g. a file inside a zip over http). The dirname is applied to the first
    hop only; the remaining hops are re-attached unchanged.

    Example::

        >>> xdirname("zip://folder1/file.txt::https://host.com/archive.zip")
        zip://folder1::https://host.com/archive.zip
    """
    hops = a.split("::")
    first = hops[0]
    if is_local_path(first):
        first = os.path.dirname(Path(first).as_posix())
    else:
        first = posixpath.dirname(first)
    # dirname at the root of a protocol yields e.g. "http:", so restore the
    # "//" that belongs to the scheme.
    if first.endswith(":"):
        first += "//"
    return "::".join([first] + hops[1:])
def _as_posix(path: Path):
    """Extend :meth:`pathlib.PurePath.as_posix` to fix missing slashes after a protocol.

    Args:
        path (:obj:`~pathlib.Path`): Calling Path instance.

    Returns:
        obj:`str`
    """
    result = SINGLE_SLASH_AFTER_PROTOCOL_PATTERN.sub("://", path.as_posix())
    if result.endswith(":"):
        # A bare protocol root (e.g. "http:") lost its slashes; restore them.
        result += "//"
    return result
def xpathjoin(a: Path, *p: Tuple[str, ...]):
    """Extend :func:`xjoin` to accept a :obj:`~pathlib.Path` as first argument.

    Args:
        a (:obj:`~pathlib.Path`): Calling Path instance.
        *p (:obj:`tuple` of :obj:`str`): Other path components.

    Returns:
        obj:`str`
    """
    joined = xjoin(_as_posix(a), *p)
    # Preserve the concrete Path subclass of the input.
    return type(a)(joined)
def _add_retries_to_file_obj_read_method(file_obj):
    """Monkey-patch `file_obj.read` so transient aiohttp disconnects are retried.

    Retries up to `config.STREAMING_READ_MAX_RETRIES` times, sleeping
    `config.STREAMING_READ_RETRY_INTERVAL` seconds between attempts, and
    raises `ConnectionError` once all attempts have failed.
    """
    read = file_obj.read
    max_retries = config.STREAMING_READ_MAX_RETRIES

    def read_with_retries(*args, **kwargs):
        for retry in range(1, max_retries + 1):
            try:
                out = read(*args, **kwargs)
                break
            except ClientError:
                logger.warning(
                    f"Got disconnected from remote data host. Retrying in {config.STREAMING_READ_RETRY_INTERVAL}sec [{retry}/{max_retries}]"
                )
                # NOTE(review): this also sleeps after the final failed
                # attempt, right before the for-else raises below.
                time.sleep(config.STREAMING_READ_RETRY_INTERVAL)
        else:
            # for-else: reached only when the loop exhausted all retries
            # without hitting `break` (i.e. every attempt raised ClientError).
            raise ConnectionError("Server Disconnected")
        return out

    file_obj.read = read_with_retries
def _get_extraction_protocol(urlpath: str) -> Optional[str]:
    """Map a chained URL to the fsspec protocol needed to read inside it.

    Returns ``None`` for plain data files that need no extraction, the
    matching compression/archive protocol otherwise, and raises
    :exc:`NotImplementedError` for unsupported extensions (including
    ``.tar.gz`` / ``.tgz``).
    """
    # Only the innermost hop determines the compression:
    # zip://train-00000.json.gz::https://foo.bar/data.zip -> zip://train-00000.json.gz
    innermost = urlpath.split("::")[0]
    # Strip a trailing "dl=1" query param: https://foo.bar/train.json.gz?dl=1 -> https://foo.bar/train.json.gz
    query_suffix = "?dl=1"
    if innermost.endswith(query_suffix):
        innermost = innermost[: -len(query_suffix)]
    # Extension: https://foo.bar/train.json.gz -> gz
    extension = innermost.split(".")[-1]
    if extension in BASE_KNOWN_EXTENSIONS:
        return None
    is_tarball = innermost.endswith(".tar.gz") or innermost.endswith(".tgz")
    if not is_tarball and extension in COMPRESSION_EXTENSION_TO_PROTOCOL:
        return COMPRESSION_EXTENSION_TO_PROTOCOL[extension]
    raise NotImplementedError(f"Extraction protocol for file at {urlpath} is not implemented yet")
def xopen(file, mode="r", *args, **kwargs):
    """Open a local path or (possibly chained) remote URL as a file-like object.

    Extends the builtin `open` via fsspec, with a retry mechanism in case the
    connection fails. All args and kwargs are forwarded to fsspec.open, except
    `use_auth_token`, which is consumed here for queries to private repos on
    huggingface.co.
    """
    fs = fsspec.get_fs_token_paths(file)[0]
    if fs.protocol == "https":
        token = kwargs.pop("use_auth_token", None)
        kwargs["headers"] = get_authentication_headers_for_url(file, use_auth_token=token)
    file_obj = fsspec.open(file, mode=mode, *args, **kwargs).open()
    _add_retries_to_file_obj_read_method(file_obj)
    return file_obj
def xpathopen(path: Path, *args, **kwargs):
    """Extend :func:`xopen` to accept a :obj:`~pathlib.Path`.

    Args:
        path (:obj:`~pathlib.Path`): Calling Path instance.
        **kwargs: Keyword arguments passed to :func:`fsspec.open`.

    Returns:
        :obj:`io.FileIO`: File-like object.
    """
    posix_path = _as_posix(path)
    return xopen(posix_path, *args, **kwargs)
def xpathglob(path, pattern):
    """Glob function for argument of type :obj:`~pathlib.Path` that supports both local paths and remote URLs.

    Args:
        path (:obj:`~pathlib.Path`): Calling Path instance.
        pattern (:obj:`str`): Pattern that resulting paths must match.

    Yields:
        :obj:`~pathlib.Path`
    """
    posix_path = _as_posix(path)
    main_hop, *rest_hops = posix_path.split("::")
    if is_local_path(main_hop):
        yield from Path(main_hop).glob(pattern)
    else:
        # Remote URL: delegate matching to the fsspec filesystem of the first hop.
        fs, *_ = fsspec.get_fs_token_paths(xjoin(posix_path, pattern))
        # - If there's no "*" in the pattern, get_fs_token_paths() doesn't do any pattern matching
        #   so to be able to glob patterns like "[0-9]", we have to call `fs.glob`.
        # - Also "*" in get_fs_token_paths() only matches files: we have to call `fs.glob` to match directories.
        # - If there is "**" in the pattern, `fs.glob` must be called anyway.
        globbed_paths = fs.glob(xjoin(main_hop, pattern))
        for globbed_path in globbed_paths:
            # Re-attach the remaining hops so results are still chained URLs.
            yield type(path)("::".join([f"{fs.protocol}://{globbed_path}"] + rest_hops))
def xpathrglob(path, pattern):
    """Recursive glob for :obj:`~pathlib.Path`-like local paths and remote URLs.

    Equivalent to calling :func:`xpathglob` with ``"**/"`` prepended to
    `pattern`, mirroring :meth:`pathlib.Path.rglob`.

    Args:
        path (:obj:`~pathlib.Path`): Calling Path instance.
        pattern (:obj:`str`): Pattern that resulting paths must match.

    Yields:
        :obj:`~pathlib.Path`
    """
    recursive_pattern = "**/" + pattern
    return xpathglob(path, recursive_pattern)
def xpathstem(path: Path):
    """Return the stem (final component without its suffix) of the first hop.

    Works for both local paths and remote (possibly chained) URLs.

    Args:
        path (:obj:`~pathlib.Path`): Calling Path instance.

    Returns:
        :obj:`str`
    """
    main_hop = _as_posix(path).split("::")[0]
    return PurePosixPath(main_hop).stem
def xpathsuffix(path: Path):
    """Return the file extension of the first hop, including the leading dot.

    Works for both local paths and remote (possibly chained) URLs.

    Args:
        path (:obj:`~pathlib.Path`): Calling Path instance.

    Returns:
        :obj:`str`
    """
    main_hop = _as_posix(path).split("::")[0]
    return PurePosixPath(main_hop).suffix
def xpandas_read_csv(path, **kwargs):
    """Extend :func:`pandas.read_csv` to read local paths and remote URLs via :func:`xopen`."""
    import pandas as pd
    return pd.read_csv(xopen(path), **kwargs)
class StreamingDownloadManager(object):
    """
    Download manager that uses the "::" separator to navigate through (possibly remote) compressed archives.
    Contrary to the regular DownloadManager, the `download` and `extract` methods don't actually download nor extract
    data, but they rather return the path or url that could be opened using the `xopen` function which extends the
    builtin `open` function to stream data from remote files.
    """

    def __init__(
        self,
        dataset_name: Optional[str] = None,
        data_dir: Optional[str] = None,
        download_config: Optional[DownloadConfig] = None,
        base_path: Optional[str] = None,
    ):
        self._dataset_name = dataset_name
        self._data_dir = data_dir
        self._download_config = download_config or DownloadConfig()
        # Relative URLs/paths are resolved against this base (defaults to CWD).
        self._base_path = base_path or os.path.abspath(".")

    @property
    def manual_dir(self):
        # Directory of manually-downloaded data, when the dataset uses one.
        return self._data_dir

    def download(self, url_or_urls):
        """Normalize URL(s); no data is actually fetched (streaming mode)."""
        url_or_urls = map_nested(self._download, url_or_urls, map_tuple=True)
        return url_or_urls

    def _download(self, urlpath: str) -> str:
        urlpath = str(urlpath)
        if is_relative_path(urlpath):
            # append the relative path to the base_path
            urlpath = url_or_path_join(self._base_path, urlpath)
        return urlpath

    def extract(self, path_or_paths):
        """Chain the extraction protocol onto URL(s), e.g. "zip://::<url>"; no data is extracted."""
        urlpaths = map_nested(self._extract, path_or_paths, map_tuple=True)
        return urlpaths

    def _extract(self, urlpath: str) -> str:
        urlpath = str(urlpath)
        protocol = _get_extraction_protocol(urlpath)
        if protocol is None:
            # no extraction
            return urlpath
        elif protocol in SINGLE_FILE_COMPRESSION_PROTOCOLS:
            # there is one single file which is the uncompressed file
            inner_file = os.path.basename(urlpath.split("::")[0])
            inner_file = inner_file[: inner_file.rindex(".")]
            # check for tar.gz, tar.bz2 etc.
            if inner_file.endswith(".tar"):
                return f"tar://::{urlpath}"
            else:
                return f"{protocol}://{inner_file}::{urlpath}"
        else:
            # Archive protocols (zip/tar) mount at the archive root.
            return f"{protocol}://::{urlpath}"

    def download_and_extract(self, url_or_urls):
        """Equivalent to ``extract(download(url_or_urls))``."""
        return self.extract(self.download(url_or_urls))
| 35.904153
| 140
| 0.654832
|
264d9aa9fb447555b6ea42e97edd0462c1503277
| 13,824
|
py
|
Python
|
sdk/python/pulumi_azure_native/changeanalysis/outputs.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/changeanalysis/outputs.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/changeanalysis/outputs.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._enums import *
__all__ = [
'AzureMonitorWorkspacePropertiesResponse',
'ConfigurationProfileResourcePropertiesResponse',
'NotificationSettingsResponse',
'ResourceIdentityResponse',
'SystemDataResponse',
]
@pulumi.output_type
class AzureMonitorWorkspacePropertiesResponse(dict):
"""
Configuration properties of an Azure Monitor workspace that receives change notifications.
"""
    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire-format key is used for dict access instead
        # of the snake_case Python property (generated Pulumi convention).
        suggest = None
        if key == "includeChangeDetails":
            suggest = "include_change_details"
        elif key == "workspaceId":
            suggest = "workspace_id"
        elif key == "workspaceResourceId":
            suggest = "workspace_resource_id"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in AzureMonitorWorkspacePropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AzureMonitorWorkspacePropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AzureMonitorWorkspacePropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
include_change_details: Optional[str] = None,
workspace_id: Optional[str] = None,
workspace_resource_id: Optional[str] = None):
"""
Configuration properties of an Azure Monitor workspace that receives change notifications.
:param str include_change_details: The mode of includeChangeDetails feature. The flag configures whether to include or exclude content of the change before and after values.
:param str workspace_id: The Azure Monitor workspace ID - the unique identifier for the Log Analytics workspace.
:param str workspace_resource_id: The Azure Monitor workspace ARM Resource ID. The resource ID should be in the following format: /subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}
"""
if include_change_details is not None:
pulumi.set(__self__, "include_change_details", include_change_details)
if workspace_id is not None:
pulumi.set(__self__, "workspace_id", workspace_id)
if workspace_resource_id is not None:
pulumi.set(__self__, "workspace_resource_id", workspace_resource_id)
@property
@pulumi.getter(name="includeChangeDetails")
def include_change_details(self) -> Optional[str]:
"""
The mode of includeChangeDetails feature. The flag configures whether to include or exclude content of the change before and after values.
"""
return pulumi.get(self, "include_change_details")
@property
@pulumi.getter(name="workspaceId")
def workspace_id(self) -> Optional[str]:
"""
The Azure Monitor workspace ID - the unique identifier for the Log Analytics workspace.
"""
return pulumi.get(self, "workspace_id")
@property
@pulumi.getter(name="workspaceResourceId")
def workspace_resource_id(self) -> Optional[str]:
"""
The Azure Monitor workspace ARM Resource ID. The resource ID should be in the following format: /subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}
"""
return pulumi.get(self, "workspace_resource_id")
@pulumi.output_type
class ConfigurationProfileResourcePropertiesResponse(dict):
    """
    The properties of a configuration profile.
    """
    def __init__(__self__, *,
                 notifications: Optional['outputs.NotificationSettingsResponse'] = None):
        """
        The properties of a configuration profile.
        :param 'NotificationSettingsResponse' notifications: Settings of change notification configuration for a subscription.
        """
        # An unset (None) value is omitted entirely rather than stored as None.
        if notifications is not None:
            pulumi.set(__self__, "notifications", notifications)
    @property
    @pulumi.getter
    def notifications(self) -> Optional['outputs.NotificationSettingsResponse']:
        """
        Settings of change notification configuration for a subscription.
        """
        return pulumi.get(self, "notifications")
@pulumi.output_type
class NotificationSettingsResponse(dict):
    """
    Settings of change notification configuration for a subscription.
    """
    @staticmethod
    def __key_warning(key: str):
        # Suggest the snake_case property getter when a camelCase wire key
        # is used for dict-style access.
        suggest = None
        if key == "activationState":
            suggest = "activation_state"
        elif key == "azureMonitorWorkspaceProperties":
            suggest = "azure_monitor_workspace_properties"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in NotificationSettingsResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        NotificationSettingsResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        NotificationSettingsResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 activation_state: Optional[str] = None,
                 azure_monitor_workspace_properties: Optional['outputs.AzureMonitorWorkspacePropertiesResponse'] = None):
        """
        Settings of change notification configuration for a subscription.
        :param str activation_state: The state of notifications feature.
        :param 'AzureMonitorWorkspacePropertiesResponse' azure_monitor_workspace_properties: Configuration properties of an Azure Monitor workspace that receives change notifications.
        """
        # Unset (None) fields are omitted entirely rather than stored as None.
        if activation_state is not None:
            pulumi.set(__self__, "activation_state", activation_state)
        if azure_monitor_workspace_properties is not None:
            pulumi.set(__self__, "azure_monitor_workspace_properties", azure_monitor_workspace_properties)
    @property
    @pulumi.getter(name="activationState")
    def activation_state(self) -> Optional[str]:
        """
        The state of notifications feature.
        """
        return pulumi.get(self, "activation_state")
    @property
    @pulumi.getter(name="azureMonitorWorkspaceProperties")
    def azure_monitor_workspace_properties(self) -> Optional['outputs.AzureMonitorWorkspacePropertiesResponse']:
        """
        Configuration properties of an Azure Monitor workspace that receives change notifications.
        """
        return pulumi.get(self, "azure_monitor_workspace_properties")
@pulumi.output_type
class ResourceIdentityResponse(dict):
    """
    The identity block returned by ARM resource that supports managed identity.
    """
    @staticmethod
    def __key_warning(key: str):
        # Suggest the snake_case property getter when a camelCase wire key
        # is used for dict-style access.
        suggest = None
        if key == "principalId":
            suggest = "principal_id"
        elif key == "tenantId":
            suggest = "tenant_id"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ResourceIdentityResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        ResourceIdentityResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        ResourceIdentityResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 principal_id: str,
                 tenant_id: str,
                 type: Optional[str] = None):
        """
        The identity block returned by ARM resource that supports managed identity.
        :param str principal_id: The principal id of the identity. This property will only be provided for a system-assigned identity.
        :param str tenant_id: The tenant id associated with the resource's identity. This property will only be provided for a system-assigned identity.
        :param str type: The type of managed identity used. The type 'SystemAssigned, UserAssigned' includes both an implicitly created identity and a set of user-assigned identities. The type 'None' will remove any identities.
        """
        pulumi.set(__self__, "principal_id", principal_id)
        pulumi.set(__self__, "tenant_id", tenant_id)
        # 'type' is the only optional field; omitted entirely when unset.
        if type is not None:
            pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter(name="principalId")
    def principal_id(self) -> str:
        """
        The principal id of the identity. This property will only be provided for a system-assigned identity.
        """
        return pulumi.get(self, "principal_id")
    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> str:
        """
        The tenant id associated with the resource's identity. This property will only be provided for a system-assigned identity.
        """
        return pulumi.get(self, "tenant_id")
    @property
    @pulumi.getter
    def type(self) -> Optional[str]:
        """
        The type of managed identity used. The type 'SystemAssigned, UserAssigned' includes both an implicitly created identity and a set of user-assigned identities. The type 'None' will remove any identities.
        """
        return pulumi.get(self, "type")
@pulumi.output_type
class SystemDataResponse(dict):
    """
    Top level metadata https://github.com/Azure/azure-resource-manager-rpc/blob/master/v1.0/common-api-contracts.md#system-metadata-for-all-azure-resources
    """
    @staticmethod
    def __key_warning(key: str):
        # Suggest the snake_case property getter when a camelCase wire key
        # is used for dict-style access.
        suggest = None
        if key == "createdAt":
            suggest = "created_at"
        elif key == "createdBy":
            suggest = "created_by"
        elif key == "createdByType":
            suggest = "created_by_type"
        elif key == "lastModifiedAt":
            suggest = "last_modified_at"
        elif key == "lastModifiedBy":
            suggest = "last_modified_by"
        elif key == "lastModifiedByType":
            suggest = "last_modified_by_type"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in SystemDataResponse. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        SystemDataResponse.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        SystemDataResponse.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 created_at: str,
                 created_by: str,
                 created_by_type: str,
                 last_modified_at: str,
                 last_modified_by: str,
                 last_modified_by_type: str):
        """
        Top level metadata https://github.com/Azure/azure-resource-manager-rpc/blob/master/v1.0/common-api-contracts.md#system-metadata-for-all-azure-resources
        :param str created_at: The timestamp of resource creation (UTC)
        :param str created_by: A string identifier for the identity that created the resource
        :param str created_by_type: The type of identity that created the resource: user, application, managedIdentity, key
        :param str last_modified_at: The timestamp of resource last modification (UTC)
        :param str last_modified_by: A string identifier for the identity that last modified the resource
        :param str last_modified_by_type: The type of identity that last modified the resource: user, application, managedIdentity, key
        """
        # All six fields are required, so each is stored unconditionally.
        pulumi.set(__self__, "created_at", created_at)
        pulumi.set(__self__, "created_by", created_by)
        pulumi.set(__self__, "created_by_type", created_by_type)
        pulumi.set(__self__, "last_modified_at", last_modified_at)
        pulumi.set(__self__, "last_modified_by", last_modified_by)
        pulumi.set(__self__, "last_modified_by_type", last_modified_by_type)
    @property
    @pulumi.getter(name="createdAt")
    def created_at(self) -> str:
        """
        The timestamp of resource creation (UTC)
        """
        return pulumi.get(self, "created_at")
    @property
    @pulumi.getter(name="createdBy")
    def created_by(self) -> str:
        """
        A string identifier for the identity that created the resource
        """
        return pulumi.get(self, "created_by")
    @property
    @pulumi.getter(name="createdByType")
    def created_by_type(self) -> str:
        """
        The type of identity that created the resource: user, application, managedIdentity, key
        """
        return pulumi.get(self, "created_by_type")
    @property
    @pulumi.getter(name="lastModifiedAt")
    def last_modified_at(self) -> str:
        """
        The timestamp of resource last modification (UTC)
        """
        return pulumi.get(self, "last_modified_at")
    @property
    @pulumi.getter(name="lastModifiedBy")
    def last_modified_by(self) -> str:
        """
        A string identifier for the identity that last modified the resource
        """
        return pulumi.get(self, "last_modified_by")
    @property
    @pulumi.getter(name="lastModifiedByType")
    def last_modified_by_type(self) -> str:
        """
        The type of identity that last modified the resource: user, application, managedIdentity, key
        """
        return pulumi.get(self, "last_modified_by_type")
| 41.389222
| 271
| 0.67419
|
4d42914389da921d7a1c15c79345c932493f67ab
| 1,612
|
py
|
Python
|
wristband/constants.py
|
MONICA-Project/scral-framework
|
ad9ff066cd204ea7bf5099866c53ae320800995e
|
[
"Apache-2.0"
] | null | null | null |
wristband/constants.py
|
MONICA-Project/scral-framework
|
ad9ff066cd204ea7bf5099866c53ae320800995e
|
[
"Apache-2.0"
] | null | null | null |
wristband/constants.py
|
MONICA-Project/scral-framework
|
ad9ff066cd204ea7bf5099866c53ae320800995e
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#############################################################################
# _____ __________ ___ __ #
# / ___// ____/ __ \/ | / / #
# \__ \/ / / /_/ / /| | / / #
# ___/ / /___/ _, _/ ___ |/ /___ #
# /____/\____/_/ |_/_/ |_/_____/ Smart City Resource Adaptation Layer #
# #
# LINKS Foundation, (c) 2017-2020 #
# developed by Jacopo Foglietti & Luca Mannella #
# SCRAL is distributed under a BSD-style license -- See file LICENSE.md #
# #
#############################################################################
"""
SCRAL - constants
This file contains useful constants for this module.
"""
# --- REST endpoints ---
# Base path of the wristband gateway API; sub-resources derive from it.
URI_DEFAULT = "/scral/v1.0/wristband-gw"
URI_ACTIVE_DEVICES = f"{URI_DEFAULT}/active-devices"
URI_WRISTBAND = f"{URI_DEFAULT}/wearable"

# --- Observed properties ---
PROPERTY_LOCALIZATION_NAME = "Localization-Wristband"
PROPERTY_BUTTON_NAME = "Button-Wristband"

# --- Fixed sensor identifiers ---
SENSOR_ULTRAWIDEBAND_SCRAL = "UWB"
SENSOR_ULTRAWIDEBAND_DEXELS = "uwb"
SENSOR_ASSOCIATION_NAME = "WRISTBAND-GW/Friend-Connect/Friend-Connect-Request"

# --- Payload keys ---
TAG_ID_KEY = "tagId"
ID1_ASSOCIATION_KEY = "tagId_1"
ID2_ASSOCIATION_KEY = "tagId_2"
TIME_KEY = "timestamp"
| 41.333333
| 78
| 0.450372
|
1ceaab12466feb4301fc7f45b63b129034c7bbdf
| 574
|
py
|
Python
|
tests/share/tasks/__init__.py
|
felliott/SHARE
|
8fd60ff4749349c9b867f6188650d71f4f0a1a56
|
[
"Apache-2.0"
] | 87
|
2015-01-06T18:24:45.000Z
|
2021-08-08T07:59:40.000Z
|
tests/share/tasks/__init__.py
|
fortress-biotech/SHARE
|
9c5a05dd831447949fa6253afec5225ff8ab5d4f
|
[
"Apache-2.0"
] | 442
|
2015-01-01T19:16:01.000Z
|
2022-03-30T21:10:26.000Z
|
tests/share/tasks/__init__.py
|
fortress-biotech/SHARE
|
9c5a05dd831447949fa6253afec5225ff8ab5d4f
|
[
"Apache-2.0"
] | 67
|
2015-03-10T16:32:58.000Z
|
2021-11-12T16:33:41.000Z
|
import threading
class SyncedThread(threading.Thread):
    """Thread that runs a context-manager factory and synchronizes with the caller.

    ``target(*args, **kwargs)`` must return a context manager. The thread
    enters it, signals the caller (so :meth:`start` does not return until the
    context is active), then holds the context open until :meth:`join` is
    called, which releases it and waits for the thread to exit.
    """

    def __init__(self, target, args=(), kwargs=None):
        """
        :param target: callable returning a context manager
        :param args: positional arguments for ``target``
        :param kwargs: keyword arguments for ``target`` (optional)
        """
        self._end = threading.Event()
        self._start = threading.Event()
        # Use a None sentinel instead of a mutable {} default: a shared
        # default dict would be mutated across all instances.
        if kwargs is None:
            kwargs = {}

        def _target(*args, **kwargs):
            with target(*args, **kwargs):
                self._start.set()          # context entered; unblock start()
                self._end.wait(10)         # hold the context open until join()

        super().__init__(target=_target, args=args, kwargs=kwargs)

    def start(self):
        """Start the thread and block (up to 10s) until the context is entered."""
        super().start()
        self._start.wait(10)

    def join(self, timeout=1):
        """Release the held context and wait (default 1s) for the thread to exit."""
        self._end.set()
        return super().join(timeout)
| 23.916667
| 66
| 0.569686
|
27947a93d459905a3b21e377e5aff41c54450c94
| 3,623
|
py
|
Python
|
poseidon/baseClasses/Rabbit_Base.py
|
danielpops/poseidon
|
290405b02a0cd46dcbfafceded12ddc06b7a641a
|
[
"Apache-2.0"
] | null | null | null |
poseidon/baseClasses/Rabbit_Base.py
|
danielpops/poseidon
|
290405b02a0cd46dcbfafceded12ddc06b7a641a
|
[
"Apache-2.0"
] | null | null | null |
poseidon/baseClasses/Rabbit_Base.py
|
danielpops/poseidon
|
290405b02a0cd46dcbfafceded12ddc06b7a641a
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 In-Q-Tel, Inc, All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
''' Created on 21 August 2017
@author: dgrossman
'''
import pika
import threading
import time
from functools import partial
from .Logger_Base import Logger
module_logger = Logger
class Rabbit_Base(object): # pragma: no cover
    '''
    Base Class for RabbitMQ
    '''

    def __init__(self):
        # Module-wide logger shared by everything in this module.
        self.logger = module_logger.logger

    def make_rabbit_connection(self, host, port, exchange, queue_name, keys,
                               total_sleep=float('inf')): # pragma: no cover
        '''
        Connects to rabbitmq using the given hostname,
        exchange, and queue. Retries on failure until success.
        Binds routing keys appropriate for module, and returns
        the channel and connection.
        '''
        # wait: still trying to connect; do_rabbit: becomes False only when
        # a finite total_sleep budget is exhausted before connecting.
        wait = True
        do_rabbit = True
        rabbit_channel = None
        rabbit_connection = None

        while wait and total_sleep > 0:
            try:
                rabbit_connection = pika.BlockingConnection(
                    pika.ConnectionParameters(host=host, port=port))
                rabbit_channel = rabbit_connection.channel()
                rabbit_channel.exchange_declare(exchange=exchange,
                                                exchange_type='topic')
                # exclusive queue: removed by the broker when this
                # connection closes
                rabbit_channel.queue_declare(queue=queue_name, exclusive=True)
                self.logger.debug('connected to {0} rabbitmq...'.format(host))
                wait = False
            except Exception as e:
                # Broker not reachable yet: back off 2s and burn the budget.
                self.logger.debug(
                    'waiting for connection to {0} rabbitmq...'.format(host))
                self.logger.debug(str(e))
                time.sleep(2)
                total_sleep -= 2
                wait = True

        if wait:
            do_rabbit = False

        # keys may be either a list of routing keys or a single string;
        # bindings are only created after a successful connection.
        if isinstance(keys, list) and not wait:
            for key in keys:
                self.logger.debug(
                    'array adding key:{0} to rabbitmq channel'.format(key))
                rabbit_channel.queue_bind(exchange=exchange,
                                          queue=queue_name,
                                          routing_key=key)
        if isinstance(keys, str) and not wait:
            self.logger.debug(
                'string adding key:{0} to rabbitmq channel'.format(keys))
            rabbit_channel.queue_bind(exchange=exchange,
                                      queue=queue_name,
                                      routing_key=keys)

        return rabbit_channel, rabbit_connection, do_rabbit

    def start_channel(self, channel, mycallback, queue, m_queue):
        ''' handle threading for messagetype '''
        self.logger.debug('about to start channel {0}'.format(channel))
        # NOTE(review): positional consumer callback + no_ack matches the
        # pika 0.x API; pika 1.x renamed no_ack to auto_ack and reordered
        # arguments — confirm the pinned pika version before upgrading.
        channel.basic_consume(partial(mycallback, q=m_queue), queue=queue,
                              no_ack=True)
        # start_consuming blocks, so it runs on its own (non-daemon) thread.
        mq_recv_thread = threading.Thread(target=channel.start_consuming)
        mq_recv_thread.start()
        return mq_recv_thread
| 36.969388
| 78
| 0.591499
|
9f081b9fa9ccc679f07d5eea77807e66bf5308d6
| 156
|
py
|
Python
|
striplog/_version.py
|
rgmyr/striplog
|
9813f1c5b109de60f0717cdf0018042cd8ddeb69
|
[
"Apache-2.0"
] | 1
|
2021-05-18T08:23:58.000Z
|
2021-05-18T08:23:58.000Z
|
striplog/_version.py
|
rgmyr/striplog
|
9813f1c5b109de60f0717cdf0018042cd8ddeb69
|
[
"Apache-2.0"
] | null | null | null |
striplog/_version.py
|
rgmyr/striplog
|
9813f1c5b109de60f0717cdf0018042cd8ddeb69
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Version.
Doing it this way provides for access in setup.py and via __version__
"""
__version__ = "0.8.0"
| 17.333333
| 69
| 0.660256
|
69665378468280c82a4c776f01d8ed79b1d6df6d
| 623
|
py
|
Python
|
work/yml/manage.py
|
judebues/softmanage
|
3882534422c09cc3a6978890e51fff9ff465de24
|
[
"MIT"
] | 1
|
2020-05-21T06:48:34.000Z
|
2020-05-21T06:48:34.000Z
|
work/yml/manage.py
|
judebues/softmanage
|
3882534422c09cc3a6978890e51fff9ff465de24
|
[
"MIT"
] | 3
|
2021-03-19T03:07:36.000Z
|
2021-04-08T20:33:38.000Z
|
work/yml/manage.py
|
judebues/softmanage
|
3882534422c09cc3a6978890e51fff9ff465de24
|
[
"MIT"
] | 1
|
2020-05-21T06:48:36.000Z
|
2020-05-21T06:48:36.000Z
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run Django's administrative command line for this project."""
    # Make sure the settings module is known before Django is imported;
    # setdefault keeps any value already present in the environment.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'yml.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    else:
        execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
| 28.318182
| 73
| 0.680578
|
e060d94fd3c0a061f5cd541e0d5276b6ead987ba
| 56,803
|
py
|
Python
|
venv/Lib/site-packages/sqlalchemy/dialects/oracle/base.py
|
sunausti/mywebdemo
|
884bcf3b68e0063dcb08c602f0dc784753ec8a87
|
[
"Apache-2.0"
] | 1
|
2021-11-11T08:52:09.000Z
|
2021-11-11T08:52:09.000Z
|
venv/Lib/site-packages/sqlalchemy/dialects/oracle/base.py
|
sunausti/mywebdemo
|
884bcf3b68e0063dcb08c602f0dc784753ec8a87
|
[
"Apache-2.0"
] | null | null | null |
venv/Lib/site-packages/sqlalchemy/dialects/oracle/base.py
|
sunausti/mywebdemo
|
884bcf3b68e0063dcb08c602f0dc784753ec8a87
|
[
"Apache-2.0"
] | null | null | null |
# oracle/base.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: oracle
:name: Oracle
Oracle version 8 through current (11g at the time of this writing) are
supported.
Connect Arguments
-----------------
The dialect supports several :func:`~sqlalchemy.create_engine()` arguments
which affect the behavior of the dialect regardless of driver in use.
* ``use_ansi`` - Use ANSI JOIN constructs (see the section on Oracle 8).
Defaults to ``True``. If ``False``, Oracle-8 compatible constructs are used
for joins.
* ``optimize_limits`` - defaults to ``False``. see the section on
LIMIT/OFFSET.
* ``use_binds_for_limits`` - defaults to ``True``. see the section on
LIMIT/OFFSET.
Auto Increment Behavior
-----------------------
SQLAlchemy Table objects which include integer primary keys are usually
assumed to have "autoincrementing" behavior, meaning they can generate their
own primary key values upon INSERT. Since Oracle has no "autoincrement"
feature, SQLAlchemy relies upon sequences to produce these values. With the
Oracle dialect, *a sequence must always be explicitly specified to enable
autoincrement*. This is divergent with the majority of documentation
examples which assume the usage of an autoincrement-capable database. To
specify sequences, use the sqlalchemy.schema.Sequence object which is passed
to a Column construct::
t = Table('mytable', metadata,
Column('id', Integer, Sequence('id_seq'), primary_key=True),
Column(...), ...
)
This step is also required when using table reflection, i.e. autoload=True::
t = Table('mytable', metadata,
Column('id', Integer, Sequence('id_seq'), primary_key=True),
autoload=True
)
Identifier Casing
-----------------
In Oracle, the data dictionary represents all case insensitive identifier
names using UPPERCASE text. SQLAlchemy on the other hand considers an
all-lower case identifier name to be case insensitive. The Oracle dialect
converts all case insensitive identifiers to and from those two formats during
schema level communication, such as reflection of tables and indexes. Using
an UPPERCASE name on the SQLAlchemy side indicates a case sensitive
identifier, and SQLAlchemy will quote the name - this will cause mismatches
against data dictionary data received from Oracle, so unless identifier names
have been truly created as case sensitive (i.e. using quoted names), all
lowercase names should be used on the SQLAlchemy side.
LIMIT/OFFSET Support
--------------------
Oracle has no support for the LIMIT or OFFSET keywords. SQLAlchemy uses
a wrapped subquery approach in conjunction with ROWNUM. The exact methodology
is taken from
http://www.oracle.com/technetwork/issue-archive/2006/06-sep/o56asktom-086197.html .
There are two options which affect its behavior:
* the "FIRST ROWS()" optimization keyword is not used by default. To enable
the usage of this optimization directive, specify ``optimize_limits=True``
to :func:`.create_engine`.
* the values passed for the limit/offset are sent as bound parameters. Some
users have observed that Oracle produces a poor query plan when the values
are sent as binds and not rendered literally. To render the limit/offset
values literally within the SQL statement, specify
``use_binds_for_limits=False`` to :func:`.create_engine`.
Some users have reported better performance when the entirely different
approach of a window query is used, i.e. ROW_NUMBER() OVER (ORDER BY), to
provide LIMIT/OFFSET (note that the majority of users don't observe this).
To suit this case the method used for LIMIT/OFFSET can be replaced entirely.
See the recipe at
http://www.sqlalchemy.org/trac/wiki/UsageRecipes/WindowFunctionsByDefault
which installs a select compiler that overrides the generation of limit/offset
with a window function.
.. _oracle_returning:
RETURNING Support
-----------------
The Oracle database supports a limited form of RETURNING, in order to retrieve
result sets of matched rows from INSERT, UPDATE and DELETE statements.
Oracle's RETURNING..INTO syntax only supports one row being returned, as it
relies upon OUT parameters in order to function. In addition, supported
DBAPIs have further limitations (see :ref:`cx_oracle_returning`).
SQLAlchemy's "implicit returning" feature, which employs RETURNING within an
INSERT and sometimes an UPDATE statement in order to fetch newly generated
primary key values and other SQL defaults and expressions, is normally enabled
on the Oracle backend. By default, "implicit returning" typically only
fetches the value of a single ``nextval(some_seq)`` expression embedded into
an INSERT in order to increment a sequence within an INSERT statement and get
the value back at the same time. To disable this feature across the board,
specify ``implicit_returning=False`` to :func:`.create_engine`::
engine = create_engine("oracle://scott:tiger@dsn",
implicit_returning=False)
Implicit returning can also be disabled on a table-by-table basis as a table
option::
# Core Table
my_table = Table("my_table", metadata, ..., implicit_returning=False)
# declarative
class MyClass(Base):
__tablename__ = 'my_table'
__table_args__ = {"implicit_returning": False}
.. seealso::
:ref:`cx_oracle_returning` - additional cx_oracle-specific restrictions on
implicit returning.
ON UPDATE CASCADE
-----------------
Oracle doesn't have native ON UPDATE CASCADE functionality. A trigger based
solution is available at
http://asktom.oracle.com/tkyte/update_cascade/index.html .
When using the SQLAlchemy ORM, the ORM has limited ability to manually issue
cascading updates - specify ForeignKey objects using the
"deferrable=True, initially='deferred'" keyword arguments,
and specify "passive_updates=False" on each relationship().
Oracle 8 Compatibility
----------------------
When Oracle 8 is detected, the dialect internally configures itself to the
following behaviors:
* the use_ansi flag is set to False. This has the effect of converting all
JOIN phrases into the WHERE clause, and in the case of LEFT OUTER JOIN
makes use of Oracle's (+) operator.
* the NVARCHAR2 and NCLOB datatypes are no longer generated as DDL when
  the :class:`~sqlalchemy.types.Unicode` is used - VARCHAR2 and CLOB are
  issued instead. This is because these types don't seem to work correctly
  on Oracle 8 even though they are available. The
:class:`~sqlalchemy.types.NVARCHAR` and
:class:`~sqlalchemy.dialects.oracle.NCLOB` types will always generate
NVARCHAR2 and NCLOB.
* the "native unicode" mode is disabled when using cx_oracle, i.e. SQLAlchemy
encodes all Python unicode objects to "string" before passing in as bind
parameters.
Synonym/DBLINK Reflection
-------------------------
When using reflection with Table objects, the dialect can optionally search
for tables indicated by synonyms, either in local or remote schemas or
accessed over DBLINK, by passing the flag ``oracle_resolve_synonyms=True`` as
a keyword argument to the :class:`.Table` construct::
some_table = Table('some_table', autoload=True,
autoload_with=some_engine,
oracle_resolve_synonyms=True)
When this flag is set, the given name (such as ``some_table`` above) will
be searched not just in the ``ALL_TABLES`` view, but also within the
``ALL_SYNONYMS`` view to see if this name is actually a synonym to another
name. If the synonym is located and refers to a DBLINK, the oracle dialect
knows how to locate the table's information using DBLINK syntax(e.g.
``@dblink``).
``oracle_resolve_synonyms`` is accepted wherever reflection arguments are
accepted, including methods such as :meth:`.MetaData.reflect` and
:meth:`.Inspector.get_columns`.
If synonyms are not in use, this flag should be left disabled.
DateTime Compatibility
----------------------
Oracle has no datatype known as ``DATETIME``, it instead has only ``DATE``,
which can actually store a date and time value. For this reason, the Oracle
dialect provides a type :class:`.oracle.DATE` which is a subclass of
:class:`.DateTime`. This type has no special behavior, and is only
present as a "marker" for this type; additionally, when a database column
is reflected and the type is reported as ``DATE``, the time-supporting
:class:`.oracle.DATE` type is used.
.. versionchanged:: 0.9.4 Added :class:`.oracle.DATE` to subclass
:class:`.DateTime`. This is a change as previous versions
would reflect a ``DATE`` column as :class:`.types.DATE`, which subclasses
:class:`.Date`. The only significance here is for schemes that are
examining the type of column for use in special Python translations or
for migrating schemas to other database backends.
.. _oracle_table_options:
Oracle Table Options
-------------------------
The CREATE TABLE phrase supports the following options with Oracle
in conjunction with the :class:`.Table` construct:
* ``ON COMMIT``::
Table(
"some_table", metadata, ...,
prefixes=['GLOBAL TEMPORARY'], oracle_on_commit='PRESERVE ROWS')
.. versionadded:: 1.0.0
* ``COMPRESS``::
Table('mytable', metadata, Column('data', String(32)),
oracle_compress=True)
Table('mytable', metadata, Column('data', String(32)),
oracle_compress=6)
The ``oracle_compress`` parameter accepts either an integer compression
level, or ``True`` to use the default compression level.
.. versionadded:: 1.0.0
.. _oracle_index_options:
Oracle Specific Index Options
-----------------------------
Bitmap Indexes
~~~~~~~~~~~~~~
You can specify the ``oracle_bitmap`` parameter to create a bitmap index
instead of a B-tree index::
Index('my_index', my_table.c.data, oracle_bitmap=True)
Bitmap indexes cannot be unique and cannot be compressed. SQLAlchemy will not
check for such limitations, only the database will.
.. versionadded:: 1.0.0
Index compression
~~~~~~~~~~~~~~~~~
Oracle has a more efficient storage mode for indexes containing lots of
repeated values. Use the ``oracle_compress`` parameter to turn on key
compression::
Index('my_index', my_table.c.data, oracle_compress=True)
Index('my_index', my_table.c.data1, my_table.c.data2, unique=True,
oracle_compress=1)
The ``oracle_compress`` parameter accepts either an integer specifying the
number of prefix columns to compress, or ``True`` to use the default (all
columns for non-unique indexes, all but the last column for unique indexes).
.. versionadded:: 1.0.0
"""
import re
from sqlalchemy import util, sql
from sqlalchemy.engine import default, reflection
from sqlalchemy.sql import compiler, visitors, expression, util as sql_util
from sqlalchemy.sql import operators as sql_operators
from sqlalchemy.sql.elements import quoted_name
from sqlalchemy import types as sqltypes, schema as sa_schema
from sqlalchemy.types import VARCHAR, NVARCHAR, CHAR, \
BLOB, CLOB, TIMESTAMP, FLOAT
# Oracle reserved words; identifiers colliding with these must be quoted.
RESERVED_WORDS = \
    set('SHARE RAW DROP BETWEEN FROM DESC OPTION PRIOR LONG THEN '
        'DEFAULT ALTER IS INTO MINUS INTEGER NUMBER GRANT IDENTIFIED '
        'ALL TO ORDER ON FLOAT DATE HAVING CLUSTER NOWAIT RESOURCE '
        'ANY TABLE INDEX FOR UPDATE WHERE CHECK SMALLINT WITH DELETE '
        'BY ASC REVOKE LIKE SIZE RENAME NOCOMPRESS NULL GROUP VALUES '
        'AS IN VIEW EXCLUSIVE COMPRESS SYNONYM SELECT INSERT EXISTS '
        'NOT TRIGGER ELSE CREATE INTERSECT PCTFREE DISTINCT USER '
        'CONNECT SET MODE OF UNIQUE VARCHAR2 VARCHAR LOCK OR CHAR '
        'DECIMAL UNION PUBLIC AND START UID COMMENT CURRENT LEVEL'.split())

# Oracle built-ins called without parentheses (e.g. SYSDATE, not SYSDATE()).
NO_ARG_FNS = set('UID CURRENT_DATE SYSDATE USER '
                 'CURRENT_TIME CURRENT_TIMESTAMP'.split())
class RAW(sqltypes._Binary):
    # Oracle RAW: variable-length binary data.
    __visit_name__ = 'RAW'

# Alias; presumably kept for callers using the older name — confirm history.
OracleRaw = RAW


class NCLOB(sqltypes.Text):
    # National-character large object.
    __visit_name__ = 'NCLOB'


class VARCHAR2(VARCHAR):
    __visit_name__ = 'VARCHAR2'

# NVARCHAR2 renders through the generic NVARCHAR type.
NVARCHAR2 = NVARCHAR
class NUMBER(sqltypes.Numeric, sqltypes.Integer):
    """Oracle NUMBER type, bridging generic Numeric and Integer."""

    __visit_name__ = 'NUMBER'

    def __init__(self, precision=None, scale=None, asdecimal=None):
        # When unspecified, return Decimals only if a positive scale exists.
        use_decimal = bool(scale and scale > 0) if asdecimal is None \
            else asdecimal
        super(NUMBER, self).__init__(
            precision=precision, scale=scale, asdecimal=use_decimal)

    def adapt(self, impltype):
        adapted = super(NUMBER, self).adapt(impltype)
        # leave a hint for the DBAPI handler
        adapted._is_oracle_number = True
        return adapted

    @property
    def _type_affinity(self):
        # A positive scale means fractional digits -> Numeric affinity;
        # otherwise this behaves like an Integer.
        return sqltypes.Numeric if (self.scale or 0) > 0 else sqltypes.Integer
class DOUBLE_PRECISION(sqltypes.Numeric):
    """Oracle DOUBLE PRECISION floating-point type."""

    __visit_name__ = 'DOUBLE_PRECISION'

    def __init__(self, precision=None, scale=None, asdecimal=None):
        # Unlike generic Numeric, asdecimal defaults to False here.
        super(DOUBLE_PRECISION, self).__init__(
            precision=precision,
            scale=scale,
            asdecimal=False if asdecimal is None else asdecimal)
class BFILE(sqltypes.LargeBinary):
    # Oracle BFILE: reference to binary data stored outside the database.
    __visit_name__ = 'BFILE'


class LONG(sqltypes.Text):
    # Legacy Oracle LONG text type.
    __visit_name__ = 'LONG'
class DATE(sqltypes.DateTime):
    """Provide the oracle DATE type.

    This type has no special Python behavior, except that it subclasses
    :class:`.types.DateTime`; this is to suit the fact that the Oracle
    ``DATE`` type supports a time value.

    .. versionadded:: 0.9.4

    """
    __visit_name__ = 'DATE'

    def _compare_type_affinity(self, other):
        # Oracle DATE holds both date and time, so it is considered
        # compatible with either generic affinity.
        return other._type_affinity in (sqltypes.DateTime, sqltypes.Date)
class INTERVAL(sqltypes.TypeEngine):
    """Oracle ``INTERVAL DAY TO SECOND`` type."""
    __visit_name__ = 'INTERVAL'

    def __init__(self,
                 day_precision=None,
                 second_precision=None):
        """Construct an INTERVAL.

        Note that only DAY TO SECOND intervals are currently supported.
        This is due to a lack of support for YEAR TO MONTH intervals
        within available DBAPIs (cx_oracle and zxjdbc).

        :param day_precision: the day precision value. this is the number of
          digits to store for the day field. Defaults to "2"
        :param second_precision: the second precision value. this is the
          number of digits to store for the fractional seconds field.
          Defaults to "6".

        """
        self.day_precision = day_precision
        self.second_precision = second_precision

    @classmethod
    def _adapt_from_generic_interval(cls, interval):
        # carry precision settings over from the generic Interval type
        return INTERVAL(day_precision=interval.day_precision,
                        second_precision=interval.second_precision)

    @property
    def _type_affinity(self):
        return sqltypes.Interval
class ROWID(sqltypes.TypeEngine):
    """Oracle ROWID type.

    When used in a cast() or similar, generates ROWID.

    """
    __visit_name__ = 'ROWID'
class _OracleBoolean(sqltypes.Boolean):
    """Boolean stored numerically; binds through the DBAPI NUMBER type."""
    def get_dbapi_type(self, dbapi):
        return dbapi.NUMBER
# Generic SQLAlchemy types overridden with Oracle-specific implementations.
colspecs = {
    sqltypes.Boolean: _OracleBoolean,
    sqltypes.Interval: INTERVAL,
    sqltypes.DateTime: DATE
}
# Map type names as reported by Oracle's data dictionary views during
# reflection to their SQLAlchemy type classes.
ischema_names = {
    'VARCHAR2': VARCHAR,
    'NVARCHAR2': NVARCHAR,
    'CHAR': CHAR,
    'DATE': DATE,
    'NUMBER': NUMBER,
    'BLOB': BLOB,
    'BFILE': BFILE,
    'CLOB': CLOB,
    'NCLOB': NCLOB,
    'TIMESTAMP': TIMESTAMP,
    'TIMESTAMP WITH TIME ZONE': TIMESTAMP,
    'INTERVAL DAY TO SECOND': INTERVAL,
    'RAW': RAW,
    'FLOAT': FLOAT,
    'DOUBLE PRECISION': DOUBLE_PRECISION,
    'LONG': LONG,
}
class OracleTypeCompiler(compiler.GenericTypeCompiler):
    """Render type DDL for Oracle."""

    # Note:
    # Oracle DATE == DATETIME
    # Oracle does not allow milliseconds in DATE
    # Oracle does not support TIME columns

    def visit_datetime(self, type_, **kw):
        # generic DateTime renders as DATE, which carries a time component
        return self.visit_DATE(type_, **kw)

    def visit_float(self, type_, **kw):
        return self.visit_FLOAT(type_, **kw)

    def visit_unicode(self, type_, **kw):
        # NVARCHAR2 only where NCHAR types are available (not Oracle 8)
        if self.dialect._supports_nchar:
            return self.visit_NVARCHAR2(type_, **kw)
        else:
            return self.visit_VARCHAR2(type_, **kw)

    def visit_INTERVAL(self, type_, **kw):
        # render the optional day/second precisions only when specified
        return "INTERVAL DAY%s TO SECOND%s" % (
            type_.day_precision is not None and
            "(%d)" % type_.day_precision or
            "",
            type_.second_precision is not None and
            "(%d)" % type_.second_precision or
            "",
        )

    def visit_LONG(self, type_, **kw):
        return "LONG"

    def visit_TIMESTAMP(self, type_, **kw):
        if type_.timezone:
            return "TIMESTAMP WITH TIME ZONE"
        else:
            return "TIMESTAMP"

    def visit_DOUBLE_PRECISION(self, type_, **kw):
        return self._generate_numeric(type_, "DOUBLE PRECISION", **kw)

    def visit_NUMBER(self, type_, **kw):
        return self._generate_numeric(type_, "NUMBER", **kw)

    def _generate_numeric(self, type_, name, precision=None, scale=None, **kw):
        # Renders NAME, NAME(precision) or NAME(precision, scale),
        # depending on which values are present on the type.
        if precision is None:
            precision = type_.precision

        if scale is None:
            scale = getattr(type_, 'scale', None)

        if precision is None:
            return name
        elif scale is None:
            n = "%(name)s(%(precision)s)"
            return n % {'name': name, 'precision': precision}
        else:
            n = "%(name)s(%(precision)s, %(scale)s)"
            return n % {'name': name, 'precision': precision, 'scale': scale}

    def visit_string(self, type_, **kw):
        return self.visit_VARCHAR2(type_, **kw)

    def visit_VARCHAR2(self, type_, **kw):
        return self._visit_varchar(type_, '', '2')

    def visit_NVARCHAR2(self, type_, **kw):
        return self._visit_varchar(type_, 'N', '2')
    visit_NVARCHAR = visit_NVARCHAR2

    def visit_VARCHAR(self, type_, **kw):
        return self._visit_varchar(type_, '', '')

    def _visit_varchar(self, type_, n, num):
        # n is "N" for national character types; num is "2" for the
        # Oracle-specific VARCHAR2/NVARCHAR2 spellings.
        if not type_.length:
            return "%(n)sVARCHAR%(two)s" % {'two': num, 'n': n}
        elif not n and self.dialect._supports_char_length:
            # length counted in characters, where the server supports it
            varchar = "VARCHAR%(two)s(%(length)s CHAR)"
            return varchar % {'length': type_.length, 'two': num}
        else:
            varchar = "%(n)sVARCHAR%(two)s(%(length)s)"
            return varchar % {'length': type_.length, 'two': num, 'n': n}

    def visit_text(self, type_, **kw):
        return self.visit_CLOB(type_, **kw)

    def visit_unicode_text(self, type_, **kw):
        if self.dialect._supports_nchar:
            return self.visit_NCLOB(type_, **kw)
        else:
            return self.visit_CLOB(type_, **kw)

    def visit_large_binary(self, type_, **kw):
        return self.visit_BLOB(type_, **kw)

    def visit_big_integer(self, type_, **kw):
        # BigInteger renders as NUMBER(19)
        return self.visit_NUMBER(type_, precision=19, **kw)

    def visit_boolean(self, type_, **kw):
        # no native boolean type; stored as SMALLINT
        return self.visit_SMALLINT(type_, **kw)

    def visit_RAW(self, type_, **kw):
        if type_.length:
            return "RAW(%(length)s)" % {'length': type_.length}
        else:
            return "RAW"

    def visit_ROWID(self, type_, **kw):
        return "ROWID"
class OracleCompiler(compiler.SQLCompiler):
    """Oracle compiler modifies the lexical structure of Select
    statements to work under non-ANSI configured Oracle databases, if
    the use_ansi flag is False.
    """

    # Oracle spells the EXCEPT set operation as MINUS.
    compound_keywords = util.update_copy(
        compiler.SQLCompiler.compound_keywords,
        {
            expression.CompoundSelect.EXCEPT: 'MINUS'
        }
    )

    def __init__(self, *args, **kwargs):
        # per-statement scratch state used during compilation
        self.__wheres = {}
        self._quoted_bind_names = {}
        super(OracleCompiler, self).__init__(*args, **kwargs)

    def visit_mod_binary(self, binary, operator, **kw):
        # Oracle has no "%" operator; modulo is the mod() function
        return "mod(%s, %s)" % (self.process(binary.left, **kw),
                                self.process(binary.right, **kw))

    def visit_now_func(self, fn, **kw):
        return "CURRENT_TIMESTAMP"

    def visit_char_length_func(self, fn, **kw):
        return "LENGTH" + self.function_argspec(fn, **kw)

    def visit_match_op_binary(self, binary, operator, **kw):
        # text matching is rendered via CONTAINS()
        return "CONTAINS (%s, %s)" % (self.process(binary.left),
                                      self.process(binary.right))

    def visit_true(self, expr, **kw):
        # no boolean literals; render as 1/0
        return '1'

    def visit_false(self, expr, **kw):
        return '0'

    def get_cte_preamble(self, recursive):
        # Oracle does not use the RECURSIVE keyword for recursive CTEs
        return "WITH"

    def get_select_hint_text(self, byfroms):
        return " ".join(
            "/*+ %s */" % text for table, text in byfroms.items()
        )

    def function_argspec(self, fn, **kw):
        # certain Oracle functions (SYSDATE, USER, ...) are called without
        # parentheses; suppress the empty argument list for those
        if len(fn.clauses) > 0 or fn.name.upper() not in NO_ARG_FNS:
            return compiler.SQLCompiler.function_argspec(self, fn, **kw)
        else:
            return ""

    def default_from(self):
        """Called when a ``SELECT`` statement has no froms,
        and no ``FROM`` clause is to be appended.

        The Oracle compiler tacks a "FROM DUAL" to the statement.
        """
        return " FROM DUAL"

    def visit_join(self, join, **kwargs):
        if self.dialect.use_ansi:
            return compiler.SQLCompiler.visit_join(self, join, **kwargs)
        else:
            # non-ANSI mode: render joined tables as a comma-separated
            # FROM list; the join criteria are moved into the WHERE
            # clause by _get_nonansi_join_whereclause()
            kwargs['asfrom'] = True
            if isinstance(join.right, expression.FromGrouping):
                right = join.right.element
            else:
                right = join.right
            return self.process(join.left, **kwargs) + \
                ", " + self.process(right, **kwargs)

    def _get_nonansi_join_whereclause(self, froms):
        # Collect the ON clauses of all joins into a single WHERE clause,
        # wrapping outer-joined columns so they render with "(+)".
        clauses = []

        def visit_join(join):
            if join.isouter:
                def visit_binary(binary):
                    if binary.operator == sql_operators.eq:
                        if join.right.is_derived_from(binary.left.table):
                            binary.left = _OuterJoinColumn(binary.left)
                        elif join.right.is_derived_from(binary.right.table):
                            binary.right = _OuterJoinColumn(binary.right)
                clauses.append(visitors.cloned_traverse(
                    join.onclause, {}, {'binary': visit_binary}))
            else:
                clauses.append(join.onclause)

            # recurse into nested joins on either side
            for j in join.left, join.right:
                if isinstance(j, expression.Join):
                    visit_join(j)
                elif isinstance(j, expression.FromGrouping):
                    visit_join(j.element)

        for f in froms:
            if isinstance(f, expression.Join):
                visit_join(f)

        if not clauses:
            return None
        else:
            return sql.and_(*clauses)

    def visit_outer_join_column(self, vc, **kw):
        # old-style Oracle outer join marker
        return self.process(vc.column, **kw) + "(+)"

    def visit_sequence(self, seq):
        return (self.dialect.identifier_preparer.format_sequence(seq) +
                ".nextval")

    def get_render_as_alias_suffix(self, alias_name_text):
        """Oracle doesn't like ``FROM table AS alias``"""
        return " " + alias_name_text

    def returning_clause(self, stmt, returning_cols):
        # render "RETURNING col, ... INTO :ret_0, ..." using OUT bind
        # parameters to receive the returned values
        columns = []
        binds = []
        for i, column in enumerate(
                expression._select_iterables(returning_cols)):
            if column.type._has_column_expression:
                col_expr = column.type.column_expression(column)
            else:
                col_expr = column
            outparam = sql.outparam("ret_%d" % i, type_=column.type)
            self.binds[outparam.key] = outparam
            binds.append(
                self.bindparam_string(self._truncate_bindparam(outparam)))
            columns.append(
                self.process(col_expr, within_columns_clause=False))

            self._add_to_result_map(
                outparam.key, outparam.key,
                (column, getattr(column, 'name', None),
                 getattr(column, 'key', None)),
                column.type
            )

        return 'RETURNING ' + ', '.join(columns) + " INTO " + ", ".join(binds)

    def _TODO_visit_compound_select(self, select):
        """Need to determine how to get ``LIMIT``/``OFFSET`` into a
        ``UNION`` for Oracle.
        """
        pass

    def visit_select(self, select, **kwargs):
        """Look for ``LIMIT`` and OFFSET in a select statement, and if
        so tries to wrap it in a subquery with ``rownum`` criterion.
        """

        if not getattr(select, '_oracle_visit', None):
            if not self.dialect.use_ansi:
                froms = self._display_froms_for_select(
                    select, kwargs.get('asfrom', False))
                whereclause = self._get_nonansi_join_whereclause(froms)
                if whereclause is not None:
                    select = select.where(whereclause)
                    select._oracle_visit = True

            limit_clause = select._limit_clause
            offset_clause = select._offset_clause
            if limit_clause is not None or offset_clause is not None:
                # See http://www.oracle.com/technology/oramag/oracle/06-sep/\
                # o56asktom.html
                #
                # Generalized form of an Oracle pagination query:
                #   select ... from (
                #     select /*+ FIRST_ROWS(N) */ ...., rownum as ora_rn from
                #       ( select distinct ... where ... order by ...
                #     ) where ROWNUM <= :limit+:offset
                #   ) where ora_rn > :offset
                # Outer select and "ROWNUM as ora_rn" can be dropped if
                # limit=0

                kwargs['select_wraps_for'] = select
                select = select._generate()
                select._oracle_visit = True

                # Wrap the middle select and add the hint
                limitselect = sql.select([c for c in select.c])
                if limit_clause is not None and \
                        self.dialect.optimize_limits and \
                        select._simple_int_limit:
                    limitselect = limitselect.prefix_with(
                        "/*+ FIRST_ROWS(%d) */" %
                        select._limit)

                limitselect._oracle_visit = True
                limitselect._is_wrapper = True

                # add expressions to accommodate FOR UPDATE OF
                for_update = select._for_update_arg
                if for_update is not None and for_update.of:
                    for_update = for_update._clone()
                    for_update._copy_internals()

                    for elem in for_update.of:
                        select.append_column(elem)

                    adapter = sql_util.ClauseAdapter(select)
                    for_update.of = [
                        adapter.traverse(elem)
                        for elem in for_update.of]

                # If needed, add the limiting clause
                if limit_clause is not None:
                    if not self.dialect.use_binds_for_limits:
                        # use simple int limits, will raise an exception
                        # if the limit isn't specified this way
                        max_row = select._limit
                        if offset_clause is not None:
                            max_row += select._offset
                        max_row = sql.literal_column("%d" % max_row)
                    else:
                        max_row = limit_clause
                        if offset_clause is not None:
                            max_row = max_row + offset_clause
                    limitselect.append_whereclause(
                        sql.literal_column("ROWNUM") <= max_row)

                # If needed, add the ora_rn, and wrap again with offset.
                if offset_clause is None:
                    limitselect._for_update_arg = for_update
                    select = limitselect
                else:
                    limitselect = limitselect.column(
                        sql.literal_column("ROWNUM").label("ora_rn"))
                    limitselect._oracle_visit = True
                    limitselect._is_wrapper = True

                    offsetselect = sql.select(
                        [c for c in limitselect.c if c.key != 'ora_rn'])
                    offsetselect._oracle_visit = True
                    offsetselect._is_wrapper = True

                    if for_update is not None and for_update.of:
                        for elem in for_update.of:
                            if limitselect.corresponding_column(elem) is None:
                                limitselect.append_column(elem)

                    if not self.dialect.use_binds_for_limits:
                        offset_clause = sql.literal_column(
                            "%d" % select._offset)
                    offsetselect.append_whereclause(
                        sql.literal_column("ora_rn") > offset_clause)

                    offsetselect._for_update_arg = for_update
                    select = offsetselect

        return compiler.SQLCompiler.visit_select(self, select, **kwargs)

    def limit_clause(self, select, **kw):
        # LIMIT/OFFSET are handled via ROWNUM wrapping in visit_select()
        return ""

    def for_update_clause(self, select, **kw):
        if self.is_subquery():
            return ""

        tmp = ' FOR UPDATE'

        if select._for_update_arg.of:
            tmp += ' OF ' + ', '.join(
                self.process(elem, **kw) for elem in
                select._for_update_arg.of
            )

        if select._for_update_arg.nowait:
            tmp += " NOWAIT"
        return tmp
class OracleDDLCompiler(compiler.DDLCompiler):
    """Render DDL strings with Oracle-specific options."""

    def define_constraint_cascades(self, constraint):
        text = ""
        if constraint.ondelete is not None:
            text += " ON DELETE %s" % constraint.ondelete

        # oracle has no ON UPDATE CASCADE -
        # its only available via triggers
        # http://asktom.oracle.com/tkyte/update_cascade/index.html
        if constraint.onupdate is not None:
            util.warn(
                "Oracle does not contain native UPDATE CASCADE "
                "functionality - onupdates will not be rendered for foreign "
                "keys. Consider using deferrable=True, initially='deferred' "
                "or triggers.")

        return text

    def visit_create_index(self, create):
        index = create.element
        self._verify_index_table(index)
        preparer = self.preparer
        text = "CREATE "
        if index.unique:
            text += "UNIQUE "
        if index.dialect_options['oracle']['bitmap']:
            text += "BITMAP "
        text += "INDEX %s ON %s (%s)" % (
            self._prepared_index_name(index, include_schema=True),
            preparer.format_table(index.table, use_schema=True),
            ', '.join(
                self.sql_compiler.process(
                    expr,
                    include_table=False, literal_binds=True)
                for expr in index.expressions)
        )
        # COMPRESS may be a bare keyword (True) or carry an integer
        # prefix length
        if index.dialect_options['oracle']['compress'] is not False:
            if index.dialect_options['oracle']['compress'] is True:
                text += " COMPRESS"
            else:
                text += " COMPRESS %d" % (
                    index.dialect_options['oracle']['compress']
                )
        return text

    def post_create_table(self, table):
        # render table-level options (ON COMMIT, COMPRESS) after the
        # closing parenthesis of CREATE TABLE
        table_opts = []
        opts = table.dialect_options['oracle']

        if opts['on_commit']:
            on_commit_options = opts['on_commit'].replace("_", " ").upper()
            table_opts.append('\n ON COMMIT %s' % on_commit_options)

        if opts['compress']:
            if opts['compress'] is True:
                table_opts.append("\n COMPRESS")
            else:
                table_opts.append("\n COMPRESS FOR %s" % (
                    opts['compress']
                ))

        return ''.join(table_opts)
class OracleIdentifierPreparer(compiler.IdentifierPreparer):
    """Identifier preparer applying Oracle's reserved words and
    quoting rules."""

    # reserved words are matched case-insensitively, so store them lowered
    reserved_words = set(word.lower() for word in RESERVED_WORDS)
    # identifiers may not begin with a digit, underscore or dollar sign
    illegal_initial_characters = set("0123456789_$")

    def _bindparam_requires_quotes(self, value):
        """Return True if the given identifier requires quoting."""
        if value.lower() in self.reserved_words:
            return True
        if value[0] in self.illegal_initial_characters:
            return True
        return not self.legal_characters.match(util.text_type(value))

    def format_savepoint(self, savepoint):
        # strip any leading underscores from the savepoint name
        cleaned = savepoint.ident.lstrip('_')
        return super(
            OracleIdentifierPreparer, self).format_savepoint(
                savepoint, cleaned)
class OracleExecutionContext(default.DefaultExecutionContext):
    def fire_sequence(self, seq, type_):
        """Fetch the next sequence value via SELECT ... .nextval FROM DUAL."""
        return self._execute_scalar(
            "SELECT " +
            self.dialect.identifier_preparer.format_sequence(seq) +
            ".nextval FROM DUAL", type_)
class OracleDialect(default.DefaultDialect):
    """Base Oracle dialect: capability flags, identifier normalization,
    and reflection queries against the ALL_* data dictionary views."""

    name = 'oracle'
    supports_alter = True
    supports_unicode_statements = False
    supports_unicode_binds = False
    # classic Oracle 30-character identifier limit
    max_identifier_length = 30
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = False

    # ORDER BY cannot reference a select-list label by name
    supports_simple_order_by_label = False

    supports_sequences = True
    sequences_optional = False
    postfetch_lastrowid = False

    default_paramstyle = 'named'
    colspecs = colspecs
    ischema_names = ischema_names
    # names come back from the data dictionary uppercased; normalize them
    requires_name_normalize = True

    supports_default_values = False
    supports_empty_insert = False

    statement_compiler = OracleCompiler
    ddl_compiler = OracleDDLCompiler
    type_compiler = OracleTypeCompiler
    preparer = OracleIdentifierPreparer
    execution_ctx_cls = OracleExecutionContext

    reflection_options = ('oracle_resolve_synonyms', )

    # dialect-specific keyword arguments accepted on Table and Index
    construct_arguments = [
        (sa_schema.Table, {
            "resolve_synonyms": False,
            "on_commit": None,
            "compress": False
        }),
        (sa_schema.Index, {
            "bitmap": False,
            "compress": False
        })
    ]

    def __init__(self,
                 use_ansi=True,
                 optimize_limits=False,
                 use_binds_for_limits=True,
                 **kwargs):
        """Construct the dialect.

        :param use_ansi: when False, joins are rendered Oracle-8 style
          with join criteria in the WHERE clause and "(+)" for outer joins.
        :param optimize_limits: emit a /*+ FIRST_ROWS(N) */ hint for
          simple integer LIMIT values.
        :param use_binds_for_limits: render LIMIT/OFFSET as bound
          parameters rather than inline integer literals.
        """
        default.DefaultDialect.__init__(self, **kwargs)
        self.use_ansi = use_ansi
        self.optimize_limits = optimize_limits
        self.use_binds_for_limits = use_binds_for_limits

    def initialize(self, connection):
        super(OracleDialect, self).initialize(connection)
        # implicit RETURNING requires a server newer than 10; may have
        # been set explicitly by the user already
        self.implicit_returning = self.__dict__.get(
            'implicit_returning',
            self.server_version_info > (10, )
        )

        if self._is_oracle_8:
            # Oracle 8: no INTERVAL type and no ANSI join syntax
            self.colspecs = self.colspecs.copy()
            self.colspecs.pop(sqltypes.Interval)
            self.use_ansi = False

    @property
    def _is_oracle_8(self):
        return self.server_version_info and \
            self.server_version_info < (9, )

    @property
    def _supports_table_compression(self):
        # table COMPRESSION column available from 9.2
        return self.server_version_info and \
            self.server_version_info >= (9, 2, )

    @property
    def _supports_table_compress_for(self):
        # COMPRESS_FOR column available from 11
        return self.server_version_info and \
            self.server_version_info >= (11, )

    @property
    def _supports_char_length(self):
        return not self._is_oracle_8

    @property
    def _supports_nchar(self):
        return not self._is_oracle_8

    def do_release_savepoint(self, connection, name):
        # Oracle does not support RELEASE SAVEPOINT
        pass

    def has_table(self, connection, table_name, schema=None):
        """Return True if the named table exists in the given schema."""
        if not schema:
            schema = self.default_schema_name
        cursor = connection.execute(
            sql.text("SELECT table_name FROM all_tables "
                     "WHERE table_name = :name AND owner = :schema_name"),
            name=self.denormalize_name(table_name),
            schema_name=self.denormalize_name(schema))
        return cursor.first() is not None

    def has_sequence(self, connection, sequence_name, schema=None):
        """Return True if the named sequence exists in the given schema."""
        if not schema:
            schema = self.default_schema_name
        cursor = connection.execute(
            sql.text("SELECT sequence_name FROM all_sequences "
                     "WHERE sequence_name = :name AND "
                     "sequence_owner = :schema_name"),
            name=self.denormalize_name(sequence_name),
            schema_name=self.denormalize_name(schema))
        return cursor.first() is not None

    def normalize_name(self, name):
        """Convert a name from Oracle's uppercase convention to
        SQLAlchemy's case-insensitive (lowercase) convention."""
        if name is None:
            return None
        if util.py2k:
            if isinstance(name, str):
                name = name.decode(self.encoding)
        if name.upper() == name and not \
                self.identifier_preparer._requires_quotes(name.lower()):
            # all-uppercase, no quoting required: case-insensitive name
            return name.lower()
        elif name.lower() == name:
            # all-lowercase names were stored quoted; preserve the quoting
            return quoted_name(name, quote=True)
        else:
            return name

    def denormalize_name(self, name):
        """Inverse of normalize_name(): produce the form expected by
        the Oracle data dictionary."""
        if name is None:
            return None
        elif name.lower() == name and not \
                self.identifier_preparer._requires_quotes(name.lower()):
            name = name.upper()
        if util.py2k:
            if not self.supports_unicode_binds:
                name = name.encode(self.encoding)
            else:
                name = unicode(name)
        return name

    def _get_default_schema_name(self, connection):
        # the default schema is the connected user
        return self.normalize_name(
            connection.execute('SELECT USER FROM DUAL').scalar())

    def _resolve_synonym(self, connection, desired_owner=None,
                         desired_synonym=None, desired_table=None):
        """search for a local synonym matching the given desired owner/name.

        if desired_owner is None, attempts to locate a distinct owner.

        returns the actual name, owner, dblink name, and synonym name if
        found.
        """

        q = "SELECT owner, table_owner, table_name, db_link, "\
            "synonym_name FROM all_synonyms WHERE "
        clauses = []
        params = {}
        if desired_synonym:
            clauses.append("synonym_name = :synonym_name")
            params['synonym_name'] = desired_synonym
        if desired_owner:
            clauses.append("owner = :desired_owner")
            params['desired_owner'] = desired_owner
        if desired_table:
            clauses.append("table_name = :tname")
            params['tname'] = desired_table

        q += " AND ".join(clauses)

        result = connection.execute(sql.text(q), **params)
        if desired_owner:
            row = result.first()
            if row:
                return (row['table_name'], row['table_owner'],
                        row['db_link'], row['synonym_name'])
            else:
                return None, None, None, None
        else:
            rows = result.fetchall()
            if len(rows) > 1:
                raise AssertionError(
                    "There are multiple tables visible to the schema, you "
                    "must specify owner")
            elif len(rows) == 1:
                row = rows[0]
                return (row['table_name'], row['table_owner'],
                        row['db_link'], row['synonym_name'])
            else:
                return None, None, None, None

    @reflection.cache
    def _prepare_reflection_args(self, connection, table_name, schema=None,
                                 resolve_synonyms=False, dblink='', **kw):
        # Resolve synonyms and db links up front; returns the actual name,
        # owner, dblink suffix and synonym name for use in the reflection
        # queries that follow.
        if resolve_synonyms:
            actual_name, owner, dblink, synonym = self._resolve_synonym(
                connection,
                desired_owner=self.denormalize_name(schema),
                desired_synonym=self.denormalize_name(table_name)
            )
        else:
            actual_name, owner, dblink, synonym = None, None, None, None
        if not actual_name:
            actual_name = self.denormalize_name(table_name)

        if dblink:
            # using user_db_links here since all_db_links appears
            # to have more restricted permissions.
            # http://docs.oracle.com/cd/B28359_01/server.111/b28310/ds_admin005.htm
            # will need to hear from more users if we are doing
            # the right thing here. See [ticket:2619]
            owner = connection.scalar(
                sql.text("SELECT username FROM user_db_links "
                         "WHERE db_link=:link"), link=dblink)
            dblink = "@" + dblink
        elif not owner:
            owner = self.denormalize_name(schema or self.default_schema_name)

        return (actual_name, owner, dblink or '', synonym)

    @reflection.cache
    def get_schema_names(self, connection, **kw):
        s = "SELECT username FROM all_users ORDER BY username"
        cursor = connection.execute(s,)
        return [self.normalize_name(row[0]) for row in cursor]

    @reflection.cache
    def get_table_names(self, connection, schema=None, **kw):
        schema = self.denormalize_name(schema or self.default_schema_name)

        # note that table_names() isn't loading DBLINKed or synonym'ed tables
        if schema is None:
            schema = self.default_schema_name
        # DURATION IS NULL restricts to permanent tables; compare
        # get_temp_table_names() which uses DURATION IS NOT NULL
        s = sql.text(
            "SELECT table_name FROM all_tables "
            "WHERE nvl(tablespace_name, 'no tablespace') NOT IN "
            "('SYSTEM', 'SYSAUX') "
            "AND OWNER = :owner "
            "AND IOT_NAME IS NULL "
            "AND DURATION IS NULL")
        cursor = connection.execute(s, owner=schema)
        return [self.normalize_name(row[0]) for row in cursor]

    @reflection.cache
    def get_temp_table_names(self, connection, **kw):
        """Return temporary table names for the default schema."""
        schema = self.denormalize_name(self.default_schema_name)
        s = sql.text(
            "SELECT table_name FROM all_tables "
            "WHERE nvl(tablespace_name, 'no tablespace') NOT IN "
            "('SYSTEM', 'SYSAUX') "
            "AND OWNER = :owner "
            "AND IOT_NAME IS NULL "
            "AND DURATION IS NOT NULL")
        cursor = connection.execute(s, owner=schema)
        return [self.normalize_name(row[0]) for row in cursor]

    @reflection.cache
    def get_view_names(self, connection, schema=None, **kw):
        schema = self.denormalize_name(schema or self.default_schema_name)
        s = sql.text("SELECT view_name FROM all_views WHERE owner = :owner")
        cursor = connection.execute(s, owner=self.denormalize_name(schema))
        return [self.normalize_name(row[0]) for row in cursor]

    @reflection.cache
    def get_table_options(self, connection, table_name, schema=None, **kw):
        """Reflect table options; currently the compression settings,
        returned under the 'oracle_compress' key."""
        options = {}

        resolve_synonyms = kw.get('oracle_resolve_synonyms', False)
        dblink = kw.get('dblink', '')
        info_cache = kw.get('info_cache')

        (table_name, schema, dblink, synonym) = \
            self._prepare_reflection_args(connection, table_name, schema,
                                          resolve_synonyms, dblink,
                                          info_cache=info_cache)

        params = {"table_name": table_name}

        # only query compression columns the server version provides
        columns = ["table_name"]
        if self._supports_table_compression:
            columns.append("compression")
        if self._supports_table_compress_for:
            columns.append("compress_for")

        text = "SELECT %(columns)s "\
            "FROM ALL_TABLES%(dblink)s "\
            "WHERE table_name = :table_name"

        if schema is not None:
            params['owner'] = schema
            text += " AND owner = :owner "
        text = text % {'dblink': dblink, 'columns': ", ".join(columns)}

        result = connection.execute(sql.text(text), **params)

        enabled = dict(DISABLED=False, ENABLED=True)

        row = result.first()
        if row:
            if "compression" in row and enabled.get(row.compression, False):
                if "compress_for" in row:
                    options['oracle_compress'] = row.compress_for
                else:
                    options['oracle_compress'] = True

        return options

    @reflection.cache
    def get_columns(self, connection, table_name, schema=None, **kw):
        """
        kw arguments can be:

            oracle_resolve_synonyms

            dblink

        """

        resolve_synonyms = kw.get('oracle_resolve_synonyms', False)
        dblink = kw.get('dblink', '')
        info_cache = kw.get('info_cache')

        (table_name, schema, dblink, synonym) = \
            self._prepare_reflection_args(connection, table_name, schema,
                                          resolve_synonyms, dblink,
                                          info_cache=info_cache)
        columns = []
        # Oracle 8 lacks char_length; fall back to byte length there
        if self._supports_char_length:
            char_length_col = 'char_length'
        else:
            char_length_col = 'data_length'

        params = {"table_name": table_name}
        text = "SELECT column_name, data_type, %(char_length_col)s, "\
            "data_precision, data_scale, "\
            "nullable, data_default FROM ALL_TAB_COLUMNS%(dblink)s "\
            "WHERE table_name = :table_name"
        if schema is not None:
            params['owner'] = schema
            text += " AND owner = :owner "
        text += " ORDER BY column_id"
        text = text % {'dblink': dblink, 'char_length_col': char_length_col}

        c = connection.execute(sql.text(text), **params)

        for row in c:
            (colname, orig_colname, coltype, length, precision, scale, nullable, default) = \
                (self.normalize_name(row[0]), row[0], row[1], row[
                    2], row[3], row[4], row[5] == 'Y', row[6])

            if coltype == 'NUMBER':
                coltype = NUMBER(precision, scale)
            elif coltype in ('VARCHAR2', 'NVARCHAR2', 'CHAR'):
                coltype = self.ischema_names.get(coltype)(length)
            elif 'WITH TIME ZONE' in coltype:
                coltype = TIMESTAMP(timezone=True)
            else:
                # strip any "(n)" precision suffix, then look up by name
                coltype = re.sub(r'\(\d+\)', '', coltype)
                try:
                    coltype = self.ischema_names[coltype]
                except KeyError:
                    util.warn("Did not recognize type '%s' of column '%s'" %
                              (coltype, colname))
                    coltype = sqltypes.NULLTYPE

            cdict = {
                'name': colname,
                'type': coltype,
                'nullable': nullable,
                'default': default,
                'autoincrement': default is None
            }
            if orig_colname.lower() == orig_colname:
                # the name was stored lower-case, hence quoted; preserve it
                cdict['quote'] = True

            columns.append(cdict)
        return columns

    @reflection.cache
    def get_indexes(self, connection, table_name, schema=None,
                    resolve_synonyms=False, dblink='', **kw):
        """Reflect indexes, excluding the index backing the primary key."""

        info_cache = kw.get('info_cache')
        (table_name, schema, dblink, synonym) = \
            self._prepare_reflection_args(connection, table_name, schema,
                                          resolve_synonyms, dblink,
                                          info_cache=info_cache)
        indexes = []

        params = {'table_name': table_name}
        text = \
            "SELECT a.index_name, a.column_name, "\
            "\nb.index_type, b.uniqueness, b.compression, b.prefix_length "\
            "\nFROM ALL_IND_COLUMNS%(dblink)s a, "\
            "\nALL_INDEXES%(dblink)s b "\
            "\nWHERE "\
            "\na.index_name = b.index_name "\
            "\nAND a.table_owner = b.table_owner "\
            "\nAND a.table_name = b.table_name "\
            "\nAND a.table_name = :table_name "

        if schema is not None:
            params['schema'] = schema
            text += "AND a.table_owner = :schema "

        text += "ORDER BY a.index_name, a.column_position"

        text = text % {'dblink': dblink}

        q = sql.text(text)
        rp = connection.execute(q, **params)
        indexes = []
        last_index_name = None
        pk_constraint = self.get_pk_constraint(
            connection, table_name, schema, resolve_synonyms=resolve_synonyms,
            dblink=dblink, info_cache=kw.get('info_cache'))
        pkeys = pk_constraint['constrained_columns']
        uniqueness = dict(NONUNIQUE=False, UNIQUE=True)
        enabled = dict(DISABLED=False, ENABLED=True)

        oracle_sys_col = re.compile(r'SYS_NC\d+\$', re.IGNORECASE)

        def upper_name_set(names):
            return set([i.upper() for i in names])

        pk_names = upper_name_set(pkeys)

        def remove_if_primary_key(index):
            # don't include the primary key index
            if index is not None and \
               upper_name_set(index['column_names']) == pk_names:
                indexes.pop()

        index = None
        for rset in rp:
            # one output row per (index, column); start a new entry each
            # time the index name changes
            if rset.index_name != last_index_name:
                remove_if_primary_key(index)
                index = dict(name=self.normalize_name(rset.index_name),
                             column_names=[], dialect_options={})
                indexes.append(index)
            index['unique'] = uniqueness.get(rset.uniqueness, False)

            if rset.index_type in ('BITMAP', 'FUNCTION-BASED BITMAP'):
                index['dialect_options']['oracle_bitmap'] = True
            if enabled.get(rset.compression, False):
                index['dialect_options']['oracle_compress'] = rset.prefix_length

            # filter out Oracle SYS_NC names. could also do an outer join
            # to the all_tab_columns table and check for real col names there.
            if not oracle_sys_col.match(rset.column_name):
                index['column_names'].append(
                    self.normalize_name(rset.column_name))
            last_index_name = rset.index_name
        remove_if_primary_key(index)
        return indexes

    @reflection.cache
    def _get_constraint_data(self, connection, table_name, schema=None,
                             dblink='', **kw):
        # shared query for primary key ('P') and referential ('R')
        # constraints, joining local and remote constraint columns
        params = {'table_name': table_name}

        text = \
            "SELECT"\
            "\nac.constraint_name,"\
            "\nac.constraint_type,"\
            "\nloc.column_name AS local_column,"\
            "\nrem.table_name AS remote_table,"\
            "\nrem.column_name AS remote_column,"\
            "\nrem.owner AS remote_owner,"\
            "\nloc.position as loc_pos,"\
            "\nrem.position as rem_pos"\
            "\nFROM all_constraints%(dblink)s ac,"\
            "\nall_cons_columns%(dblink)s loc,"\
            "\nall_cons_columns%(dblink)s rem"\
            "\nWHERE ac.table_name = :table_name"\
            "\nAND ac.constraint_type IN ('R','P')"

        if schema is not None:
            params['owner'] = schema
            text += "\nAND ac.owner = :owner"

        text += \
            "\nAND ac.owner = loc.owner"\
            "\nAND ac.constraint_name = loc.constraint_name"\
            "\nAND ac.r_owner = rem.owner(+)"\
            "\nAND ac.r_constraint_name = rem.constraint_name(+)"\
            "\nAND (rem.position IS NULL or loc.position=rem.position)"\
            "\nORDER BY ac.constraint_name, loc.position"

        text = text % {'dblink': dblink}
        rp = connection.execute(sql.text(text), **params)
        constraint_data = rp.fetchall()
        return constraint_data

    @reflection.cache
    def get_pk_constraint(self, connection, table_name, schema=None, **kw):
        """Return the primary key constraint's columns and name."""
        resolve_synonyms = kw.get('oracle_resolve_synonyms', False)
        dblink = kw.get('dblink', '')
        info_cache = kw.get('info_cache')

        (table_name, schema, dblink, synonym) = \
            self._prepare_reflection_args(connection, table_name, schema,
                                          resolve_synonyms, dblink,
                                          info_cache=info_cache)
        pkeys = []
        constraint_name = None
        constraint_data = self._get_constraint_data(
            connection, table_name, schema, dblink,
            info_cache=kw.get('info_cache'))

        for row in constraint_data:
            (cons_name, cons_type, local_column, remote_table, remote_column, remote_owner) = \
                row[0:2] + tuple([self.normalize_name(x) for x in row[2:6]])
            if cons_type == 'P':
                if constraint_name is None:
                    constraint_name = self.normalize_name(cons_name)
                pkeys.append(local_column)
        return {'constrained_columns': pkeys, 'name': constraint_name}

    @reflection.cache
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        """
        kw arguments can be:

            oracle_resolve_synonyms

            dblink

        """

        requested_schema = schema  # to check later on
        resolve_synonyms = kw.get('oracle_resolve_synonyms', False)
        dblink = kw.get('dblink', '')
        info_cache = kw.get('info_cache')

        (table_name, schema, dblink, synonym) = \
            self._prepare_reflection_args(connection, table_name, schema,
                                          resolve_synonyms, dblink,
                                          info_cache=info_cache)

        constraint_data = self._get_constraint_data(
            connection, table_name, schema, dblink,
            info_cache=kw.get('info_cache'))

        def fkey_rec():
            return {
                'name': None,
                'constrained_columns': [],
                'referred_schema': None,
                'referred_table': None,
                'referred_columns': []
            }

        fkeys = util.defaultdict(fkey_rec)

        for row in constraint_data:
            (cons_name, cons_type, local_column, remote_table, remote_column, remote_owner) = \
                row[0:2] + tuple([self.normalize_name(x) for x in row[2:6]])

            if cons_type == 'R':
                if remote_table is None:
                    # ticket 363
                    util.warn(
                        ("Got 'None' querying 'table_name' from "
                         "all_cons_columns%(dblink)s - does the user have "
                         "proper rights to the table?") % {'dblink': dblink})
                    continue

                rec = fkeys[cons_name]
                rec['name'] = cons_name
                local_cols, remote_cols = rec[
                    'constrained_columns'], rec['referred_columns']

                if not rec['referred_table']:
                    if resolve_synonyms:
                        # the referred table might itself be a synonym
                        ref_remote_name, ref_remote_owner, ref_dblink, ref_synonym = \
                            self._resolve_synonym(
                                connection,
                                desired_owner=self.denormalize_name(
                                    remote_owner),
                                desired_table=self.denormalize_name(
                                    remote_table)
                            )
                        if ref_synonym:
                            remote_table = self.normalize_name(ref_synonym)
                            remote_owner = self.normalize_name(
                                ref_remote_owner)

                    rec['referred_table'] = remote_table

                    if requested_schema is not None or \
                       self.denormalize_name(remote_owner) != schema:
                        rec['referred_schema'] = remote_owner

                local_cols.append(local_column)
                remote_cols.append(remote_column)

        return list(fkeys.values())

    @reflection.cache
    def get_view_definition(self, connection, view_name, schema=None,
                            resolve_synonyms=False, dblink='', **kw):
        """Return the source text of the named view, or None."""
        info_cache = kw.get('info_cache')
        (view_name, schema, dblink, synonym) = \
            self._prepare_reflection_args(connection, view_name, schema,
                                          resolve_synonyms, dblink,
                                          info_cache=info_cache)

        params = {'view_name': view_name}
        text = "SELECT text FROM all_views WHERE view_name=:view_name"

        if schema is not None:
            text += " AND owner = :schema"
            params['schema'] = schema

        rp = connection.execute(sql.text(text), **params).scalar()
        if rp:
            if util.py2k:
                rp = rp.decode(self.encoding)
            return rp
        else:
            return None
class _OuterJoinColumn(sql.ClauseElement):
    """Wrapper marking a column to be rendered with Oracle's old-style
    "(+)" outer join operator (non-ANSI mode)."""
    __visit_name__ = 'outer_join_column'

    def __init__(self, column):
        # the column which will render with a trailing "(+)"
        self.column = column
| 36.272669
| 95
| 0.601975
|
7dacd8e046ccc823467161e4d0e59a7887a035fc
| 4,717
|
py
|
Python
|
tests/python/unittest/test_tir_transform_coproc_sync.py
|
XiaoSong9905/tvm
|
48940f697e15d5b50fa1f032003e6c700ae1e423
|
[
"Apache-2.0"
] | 4,640
|
2017-08-17T19:22:15.000Z
|
2019-11-04T15:29:46.000Z
|
tests/python/unittest/test_tir_transform_coproc_sync.py
|
XiaoSong9905/tvm
|
48940f697e15d5b50fa1f032003e6c700ae1e423
|
[
"Apache-2.0"
] | 3,022
|
2020-11-24T14:02:31.000Z
|
2022-03-31T23:55:31.000Z
|
tests/python/unittest/test_tir_transform_coproc_sync.py
|
XiaoSong9905/tvm
|
48940f697e15d5b50fa1f032003e6c700ae1e423
|
[
"Apache-2.0"
] | 1,352
|
2017-08-17T19:30:38.000Z
|
2019-11-04T16:09:29.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tvm
from tvm import te

# register the ops
# Give each coprocessor-synchronization intrinsic a global symbol name so the
# lowered calls produced by the CoProcSync pass resolve to extern functions.
tvm.ir.register_op_attr("tir.cop.coproc_sync", "TGlobalSymbol", "coproc_sync")
tvm.ir.register_op_attr("tir.cop.coproc_read_barrier", "TGlobalSymbol", "coproc_readb")
tvm.ir.register_op_attr("tir.cop.coproc_write_barrier", "TGlobalSymbol", "coproc_writeb")
tvm.ir.register_op_attr("tir.cop.coproc_dep_push", "TGlobalSymbol", "coproc_dep_push")
tvm.ir.register_op_attr("tir.cop.coproc_dep_pop", "TGlobalSymbol", "coproc_dep_pop")
def test_coproc_sync():
    """CoProcSync must wrap the coproc-scoped loops with read/write barriers
    and a final sync."""

    # Register a memory-info provider for the "global.cache" scope so the
    # pass can reason about the cached buffer A.
    @tvm.register_func("tvm.info.mem.global.cache")
    def meminfo_cache():
        return tvm.ir.make_node(
            "MemoryInfo",
            unit_bits=8,
            max_simd_bits=32,
            max_num_bits=128,
            head_address=tvm.tir.call_extern("handle", "global_cache"),
        )

    ib = tvm.tir.ir_builder.create()
    n = te.size_var("n")
    cp = te.thread_axis((0, 1), "cop")
    A = ib.allocate("float32", 128, name="A", scope="global.cache")
    with ib.for_range(0, n, name="i") as i:
        A[i] = A[i] + 1  # host-side touch of A before the coproc region
        with ib.for_range(0, 8, name="k") as k:
            with ib.for_range(0, 10, name="j") as j:
                ib.scope_attr(cp, "coproc_scope", 1)
                A[j] = A[j + k * 10] + 2

    stmt = ib.get()
    mod = tvm.IRModule.from_expr(tvm.tir.PrimFunc([n], stmt))
    stmt = tvm.tir.transform.CoProcSync()(mod)["main"].body

    body = stmt.body.body
    blist = tvm.tir.stmt_list(body)
    # Read barrier covers the 80 elements (8*10) the coproc loops consume.
    assert blist[1].value.op.same_as(tvm.ir.Op.get("tir.cop.coproc_read_barrier"))
    assert blist[1].value.args[3].value == 80
    # The region ends with a sync followed by a write barrier over 10 elements.
    assert blist[-2].value.op.same_as(tvm.ir.Op.get("tir.cop.coproc_sync"))
    assert blist[-1].value.op.same_as(tvm.ir.Op.get("tir.cop.coproc_write_barrier"))
    assert blist[-1].value.args[3].value == 10
def test_coproc_sync2():
    """Smoke test: CoProcSync must run cleanly when coproc scopes are mixed
    with virtual threads (no result assertions, just no crash)."""
    ib = tvm.tir.ir_builder.create()
    n = te.size_var("n")
    cp = te.thread_axis((0, 1), "cop")
    ty = te.thread_axis("cthread")
    A = ib.allocate("float32", 128, name="A")
    ib.scope_attr(ty, "virtual_thread", 2)
    with ib.new_scope():
        ib.scope_attr(cp, "coproc_scope", 2)
        A[ty] = 0.0
    with ib.for_range(0, n, name="i") as i:
        # Two coproc scopes alternate inside the loop body.
        with ib.new_scope():
            ib.scope_attr(cp, "coproc_scope", 1)
            A[ty] = 1.0
        with ib.new_scope():
            ib.scope_attr(cp, "coproc_scope", 2)
            A[ty] = 1.0
    stmt = ib.get()
    mod = tvm.IRModule.from_expr(tvm.tir.PrimFunc([n], stmt))
    stmt = tvm.tir.transform.CoProcSync()(mod)["main"].body
def test_coproc_sync3():
    """CoProcSync must insert matching dep_push/dep_pop pairs between the
    coproc scopes (1 -> 2 inside the loop nest, popped in scope 3)."""

    def __check_list(tvm_array, py_list):
        # Compare a TVM array of IntImm-like args against a plain Python list.
        for ti, li in zip(tvm_array, py_list):
            if ti.value != li:
                return False
        return True

    ib = tvm.tir.ir_builder.create()
    n = te.size_var("n")
    cp = te.thread_axis((0, 1), "cop")
    A = ib.allocate("float32", 128, name="A", scope="global.cache")
    with ib.for_range(0, n, name="i") as i:
        with ib.for_range(0, n, name="i") as j:
            with ib.new_scope():
                ib.scope_attr(cp, "coproc_scope", 1)
                A[i] = 1.0
            with ib.new_scope():
                ib.scope_attr(cp, "coproc_scope", 2)
                A[i] = 1.0
    with ib.new_scope():
        ib.scope_attr(cp, "coproc_scope", 3)
        A[0] = 0.0

    stmt = ib.get()
    mod = tvm.IRModule.from_expr(tvm.tir.PrimFunc([n], stmt))
    stmt = tvm.tir.transform.CoProcSync()(mod)["main"].body

    slist = tvm.tir.stmt_list(stmt[0].body)
    push_st = slist[2]
    slist = tvm.tir.stmt_list(slist[-1])
    pop_st = slist[0].body[0]

    # Both push and pop must reference the same (from, to) = (2, 3) pair.
    assert push_st.value.op.same_as(tvm.ir.Op.get("tir.cop.coproc_dep_push"))
    assert __check_list(push_st.value.args, [2, 3])
    assert pop_st.value.op.same_as(tvm.ir.Op.get("tir.cop.coproc_dep_pop"))
    assert __check_list(pop_st.value.args, [2, 3])
if __name__ == "__main__":
    # Allow running the CoProcSync tests directly, without pytest.
    test_coproc_sync()
    test_coproc_sync2()
    test_coproc_sync3()
| 36.284615
| 89
| 0.634513
|
5f173a5909f012704284dcf04812fc2c5937c2b5
| 18,128
|
py
|
Python
|
sgdml/utils/perm.py
|
rangsimanketkaew/sGDML
|
3f06e0de33462afdfaecb310ac2d4e073b6ed2cf
|
[
"MIT"
] | 72
|
2018-07-11T18:46:17.000Z
|
2022-03-13T03:33:09.000Z
|
sgdml/utils/perm.py
|
rangsimanketkaew/sGDML
|
3f06e0de33462afdfaecb310ac2d4e073b6ed2cf
|
[
"MIT"
] | 11
|
2018-09-14T18:43:03.000Z
|
2021-06-15T12:21:52.000Z
|
sgdml/utils/perm.py
|
rangsimanketkaew/sGDML
|
3f06e0de33462afdfaecb310ac2d4e073b6ed2cf
|
[
"MIT"
] | 31
|
2018-10-29T08:06:00.000Z
|
2022-03-25T13:53:43.000Z
|
#!/usr/bin/python
# MIT License
#
# Copyright (c) 2018-2021 Stefan Chmiela
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import print_function

import multiprocessing as mp

# Use the 'fork' start method explicitly so worker processes inherit this
# module's globals (the shared buffers stashed in ``glob``) without pickling.
Pool = mp.get_context('fork').Pool

import sys
import timeit
from functools import partial

import numpy as np
import scipy.optimize
import scipy.spatial.distance
from scipy.sparse import csr_matrix
from scipy.sparse.csgraph import minimum_spanning_tree

from .. import DONE, NOT_DONE
from .desc import Desc
from . import ui

# Module-level registry of multiprocessing.RawArray buffers (and their
# shapes) shared with forked worker processes.
glob = {}
def share_array(arr_np, typecode):
    """Copy *arr_np* into a flat, process-shareable ``RawArray``.

    Returns the raw shared buffer together with the original shape, so a
    receiver can rebuild an ndarray view via ``np.frombuffer(...).reshape``.
    """
    shared_buf = mp.RawArray(typecode, arr_np.ravel())
    return shared_buf, arr_np.shape
def _bipartite_match_wkr(i, n_train, same_z_cost):
    """Worker: match geometry *i* against every geometry j > i.

    Reads the shared adjacency/eigenvector buffers from the module-global
    ``glob`` registry, writes pairwise match scores into the shared cost
    matrix, and returns the non-trivial permutations found.
    """
    global glob

    adj_set = np.frombuffer(glob['adj_set']).reshape(glob['adj_set_shape'])
    v_set = np.frombuffer(glob['v_set']).reshape(glob['v_set_shape'])
    match_cost = np.frombuffer(glob['match_cost']).reshape(glob['match_cost_shape'])

    adj_ref = scipy.spatial.distance.squareform(adj_set[i, :])
    vecs_ref = v_set[i, :, :]

    match_perms = {}
    for j in range(i + 1, n_train):
        adj_other = scipy.spatial.distance.squareform(adj_set[j, :])
        vecs_other = v_set[j, :, :]

        # Assignment cost from eigenvector overlap, with a penalty that
        # discourages pairing atoms of different species.
        cost = -np.fabs(vecs_ref).dot(np.fabs(vecs_other).T)
        cost += same_z_cost * np.max(np.abs(cost))

        _, perm = scipy.optimize.linear_sum_assignment(cost)

        adj_ref_perm = adj_ref[perm, :][:, perm]

        score_before = np.linalg.norm(adj_ref - adj_other)
        score = np.linalg.norm(adj_ref_perm - adj_other)

        # Record the better of permuted/unpermuted agreement; keep the
        # permutation only if it strictly improves and is not the identity
        # in effect (scores essentially equal).
        match_cost[i, j] = min(score, score_before)
        if score < score_before and not np.isclose(score_before, score):
            match_perms[i, j] = perm

    return match_perms
def bipartite_match(R, z, lat_and_inv=None, max_processes=None, callback=None):
    """Match atoms between every pair of geometries in *R* (bi-partite).

    Builds per-geometry distance descriptors and adjacency eigenvectors,
    shares them with a process pool, and collects pairwise permutations
    plus a symmetric cost matrix (as CSR, with +inf diagonal).
    """
    global glob

    n_train, n_atoms, _ = R.shape

    # penalty matrix for mixing atom species
    same_z_cost = np.repeat(z[:, None], len(z), axis=1) - z
    same_z_cost[same_z_cost != 0] = 1

    # (removed: large commented-out experiment that also penalized mixing
    # differently-bonded atoms via an ASE bond analysis)

    match_cost = np.zeros((n_train, n_train))

    desc = Desc(n_atoms, max_processes=max_processes)

    adj_set = np.empty((n_train, desc.dim))
    v_set = np.empty((n_train, n_atoms, n_atoms))
    for i in range(n_train):
        r = np.squeeze(R[i, :, :])

        if lat_and_inv is None:
            adj = scipy.spatial.distance.pdist(r, 'euclidean')
            # (removed: commented-out ASE bond-masking experiment)
        else:
            # Periodic case: use minimum-image distances via the lattice.
            adj = scipy.spatial.distance.pdist(
                r, lambda u, v: np.linalg.norm(desc.pbc_diff(u - v, lat_and_inv))
            )

        # Eigenvectors of the adjacency matrix, sorted by descending eigenvalue.
        w, v = np.linalg.eig(scipy.spatial.distance.squareform(adj))
        v = v[:, w.argsort()[::-1]]

        adj_set[i, :] = adj
        v_set[i, :, :] = v

    # Expose the descriptor arrays to forked workers as shared memory.
    glob['adj_set'], glob['adj_set_shape'] = share_array(adj_set, 'd')
    glob['v_set'], glob['v_set_shape'] = share_array(v_set, 'd')
    glob['match_cost'], glob['match_cost_shape'] = share_array(match_cost, 'd')

    if callback is not None:
        callback = partial(callback, disp_str='Bi-partite matching')

    start = timeit.default_timer()

    pool = Pool(max_processes)

    match_perms_all = {}
    for i, match_perms in enumerate(
        pool.imap_unordered(
            partial(_bipartite_match_wkr, n_train=n_train, same_z_cost=same_z_cost),
            list(range(n_train)),
        )
    ):
        match_perms_all.update(match_perms)

        if callback is not None:
            callback(i, n_train)
    pool.close()
    pool.join()  # Wait for the worker processes to terminate (to measure total runtime correctly).

    stop = timeit.default_timer()

    # NOTE(review): the halving of the duration below looks intentional only
    # for display purposes — confirm why elapsed time is divided by 2.
    dur_s = (stop - start) / 2
    sec_disp_str = 'took {:.1f} s'.format(dur_s) if dur_s >= 0.1 else ''
    if callback is not None:
        callback(n_train, n_train, sec_disp_str=sec_disp_str)

    # Workers filled the upper triangle; symmetrize and block the diagonal.
    match_cost = np.frombuffer(glob['match_cost']).reshape(glob['match_cost_shape'])
    match_cost = match_cost + match_cost.T
    match_cost[np.diag_indices_from(match_cost)] = np.inf
    match_cost = csr_matrix(match_cost)

    return match_perms_all, match_cost
def sync_perm_mat(match_perms_all, match_cost, n_atoms, callback=None):
    """Synchronize pairwise matchings into one consistent permutation set.

    Keeps only the permutations along the minimum-spanning tree of the
    pairwise match-cost graph, always including the identity, and returns
    the unique rows as a 2-D integer array.
    """
    if callback is not None:
        callback = partial(
            callback, disp_str='Multi-partite matching (permutation synchronization)'
        )
        callback(NOT_DONE)

    tree = minimum_spanning_tree(match_cost, overwrite=True)

    perms = np.arange(n_atoms, dtype=int)[None, :]  # start from the identity
    for edge in zip(*tree.nonzero()):
        edge_perm = match_perms_all.get(edge)
        if edge_perm is not None:
            perms = np.vstack((perms, edge_perm))
    perms = np.unique(perms, axis=0)

    if callback is not None:
        callback(DONE)

    return perms
# Convert a permutation to disjoint cycles.
def to_cycles(perm):
    """Decompose the permutation *perm* (a sequence of indices) into its
    disjoint cycles.

    Returns a list of cycles, each cycle being a list of indices in the
    order they are visited.
    """
    remaining = {idx: perm[idx] for idx in range(len(perm))}
    all_cycles = []
    while remaining:
        # Start a new cycle at an arbitrary unvisited element.
        start = next(iter(remaining))
        current = remaining[start]
        cycle = []
        while current in remaining:
            follower = remaining.pop(current)
            cycle.append(current)
            current = follower
        all_cycles.append(cycle)
    return all_cycles
# Find the permutation that generates the subgroup with the largest cardinality.
# NOTE: this is used if the transitive closure fails (to salvage at least some permutations).
def salvage_subgroup(perms):
    """Reduce *perms* to the identity plus its single 'richest' permutation.

    The permutation kept is the one with the largest order, i.e. the largest
    lcm of its cycle lengths — it generates the biggest cyclic subgroup.
    """
    n_perms, n_atoms = perms.shape

    # Order of each permutation = lcm of its cycle lengths.
    orders = [
        np.lcm.reduce([len(cycle) for cycle in to_cycles(list(p))]) for p in perms
    ]

    best = np.argmax(orders)
    return np.vstack((np.arange(n_atoms), perms[best, :]))
def complete_sym_group(perms, n_perms_max=None, disp_str='Permutation group completion', callback=None):
    """Close the permutation set *perms* under composition.

    Repeatedly composes pairs and appends any new permutation until no more
    appear. Returns ``None`` (giving up) if the closure reaches
    ``n_perms_max`` permutations without converging.
    """
    if callback is not None:
        callback = partial(callback, disp_str=disp_str)
        callback(NOT_DONE)

    grew = True
    while grew:
        grew = False
        n_perms = perms.shape[0]
        for i in range(n_perms):
            for j in range(n_perms):
                composed = perms[i, perms[j, :]]
                if not (composed == perms).all(axis=1).any():
                    grew = True
                    perms = np.vstack((perms, composed))

                    # Transitive closure is not converging! Give up and return identity permutation.
                    if n_perms_max is not None and perms.shape[0] == n_perms_max:
                        if callback is not None:
                            callback(
                                DONE,
                                sec_disp_str='transitive closure has failed',
                                done_with_warning=True,
                            )
                        return None

    if callback is not None:
        callback(
            DONE,
            sec_disp_str='found {:d} symmetries'.format(perms.shape[0]),
        )

    return perms
def find_perms(R, z, lat_and_inv=None, callback=None, max_processes=None):
    """Find (approximate) permutation symmetries shared by the geometries in *R*.

    Pipeline: pairwise bi-partite matching -> global synchronization over the
    matching MST -> closure under composition. Falls back to a single
    salvaged permutation if the closure diverges.
    """
    n_atoms = R.shape[1]

    # Pairwise matchings between all training geometries.
    pairwise_perms, pairwise_cost = bipartite_match(
        R, z, lat_and_inv, max_processes, callback=callback
    )

    # Make the pairwise matchings globally consistent.
    consistent_perms = sync_perm_mat(pairwise_perms, pairwise_cost, n_atoms, callback=callback)

    # Close under composition; give up beyond 100 unique permutations.
    group = complete_sym_group(consistent_perms, n_perms_max=100, callback=callback)

    if group is None:
        # Closure diverged: retry from the single richest permutation.
        fallback = salvage_subgroup(consistent_perms)
        group = complete_sym_group(
            fallback, n_perms_max=100, disp_str='Closure disaster recovery', callback=callback
        )

    return group
def find_frag_perms(R, z, lat_and_inv=None, callback=None, max_processes=None):
    """Find permutations that swap whole non-bonded fragments.

    NOTE(review): experimental, print-heavy code; contains hard-coded atom
    indices near the end that look like leftover debugging — confirm before
    relying on this function. Positions are assumed to be in Angstrom (ASE
    bond detection depends on it).
    """
    from ase import Atoms
    from ase.geometry.analysis import Analysis
    from scipy.sparse.csgraph import connected_components

    print('Finding permutable non-bonded fragments... (assumes Ang!)')
    # TODO: positions must be in Angstrom for this to work!!

    n_train, n_atoms = R.shape[:2]

    atoms = Atoms(
        z, positions=R[0]
    )  # only use first molecule in dataset to find connected components (fix me later, maybe) # *0.529177249
    adj = Analysis(atoms).adjacency_matrix[0]
    _, labels = connected_components(csgraph=adj, directed=False, return_labels=True)

    # Group atom indices by connected component (= fragment).
    frags = []
    for label in np.unique(labels):
        frags.append(np.where(labels == label)[0])
    n_frags = len(frags)

    if n_frags == n_atoms:
        # Every atom in its own component: bond detection found nothing.
        print(
            'Skipping fragment symmetry search (something went wrong, e.g. length unit not in Angstroms, etc.)'
        )
        return [range(n_atoms)]

    # (removed: commented-out experiment matching coarse-grained fragment
    # centroids instead of individual atoms)

    print('| Found ' + str(n_frags) + ' disconnected fragments.')

    # NOTE(review): despite the name, this counts composition-matching
    # fragment *pairs*, not unique fragments — confirm intent.
    n_frags_unique = 0  # number of unique fragments

    # match fragments to find identical ones (allows permutations of fragments)
    swap_perms = [np.arange(n_atoms)]
    for f1 in range(n_frags):
        for f2 in range(f1 + 1, n_frags):
            # Sort both fragments by species so compositions can be compared.
            sort_idx_f1 = np.argsort(z[frags[f1]])
            sort_idx_f2 = np.argsort(z[frags[f2]])
            inv_sort_idx_f2 = inv_perm(sort_idx_f2)

            z1 = z[frags[f1]][sort_idx_f1]
            z2 = z[frags[f2]][sort_idx_f2]
            if np.array_equal(z1, z2):  # fragment have the same composition
                n_frags_unique += 1

                for ri in range(
                    min(10, R.shape[0])
                ):  # only use first molecule in dataset for matching (fix me later)
                    R_match1 = R[ri, frags[f1], :]
                    R_match2 = R[ri, frags[f2], :]

                    R_pair = np.concatenate(
                        (R_match1[None, sort_idx_f1, :], R_match2[None, sort_idx_f2, :])
                    )
                    perms = find_perms(
                        R_pair, z1, lat_and_inv=lat_and_inv, max_processes=max_processes
                    )

                    # embed local permutation into global context
                    for p in perms:
                        match_perm = sort_idx_f1[p][inv_sort_idx_f2]
                        swap_perm = np.arange(n_atoms)
                        swap_perm[frags[f1]] = frags[f2][match_perm]
                        swap_perm[frags[f2][match_perm]] = frags[f1]
                        swap_perms.append(swap_perm)

    swap_perms = np.unique(np.array(swap_perms), axis=0)
    print('| Found ' + str(n_frags_unique) + ' (likely to be) *unique* disconnected fragments.')

    # commplete symmetric group
    sym_group_perms = complete_sym_group(swap_perms)
    print(
        '| Found '
        + str(sym_group_perms.shape[0])
        + ' fragment permutations after closure.'
    )

    # match fragments with themselves (to find symmetries in each fragment)
    if n_frags > 1:
        print('| Matching individual fragments.')
        for f in range(n_frags):
            R_frag = R[:, frags[f], :]
            z_frag = z[frags[f]]
            print(f)
            perms = find_perms(
                R_frag, z_frag, lat_and_inv=lat_and_inv, max_processes=max_processes
            )
            print(perms)

        # NOTE(review): hard-coded anchor indices below only make sense for
        # one specific system — almost certainly leftover debugging code.
        f = 0
        perms = find_perms_via_alignment(R[0, :, :], frags[f], [215, 214, 210, 211], [209, 208, 212, 213], z, lat_and_inv=lat_and_inv, max_processes=max_processes)
        sym_group_perms = np.vstack((perms[None,:], sym_group_perms))
        sym_group_perms = complete_sym_group(sym_group_perms, callback=callback)

    return sym_group_perms
def find_perms_via_alignment(pts_full, frag_idxs, align_a_idxs, align_b_idxs, z, lat_and_inv=None, max_processes=None):
    """Derive an atom permutation by rigidly aligning two anchor sets.

    Finds the rotation (about the fragment centroid) that best maps the
    anchor points ``align_a_idxs`` onto ``align_b_idxs`` (both index into the
    fragment, not the full geometry), applies it to the whole fragment, and
    returns the assignment that matches the original geometry to the rotated
    one with minimal total Euclidean distance.

    Parameters
    ----------
    pts_full : (N, 3) array of all atom positions (not modified).
    frag_idxs : indices of the fragment's atoms within ``pts_full``.
    align_a_idxs, align_b_idxs : anchor indices within the fragment.
    z, lat_and_inv, max_processes : unused; kept for signature compatibility
        with the other ``find_perms*`` helpers.

    Returns
    -------
    perm : (N,) integer array mapping original atoms to rotated positions.

    Note: the original implementation also computed unused match scores and
    ``z_frag``; that dead code has been removed — the returned permutation is
    unchanged.
    """
    frag_pts = pts_full[frag_idxs, :]

    anchors_a = frag_pts[align_a_idxs, :]
    anchors_b = frag_pts[align_b_idxs, :]

    # Center anchors on the fragment centroid (rotation is about this point).
    centroid = np.mean(frag_pts, axis=0)
    anchors_a = anchors_a - centroid
    anchors_b = anchors_b - centroid

    # Kabsch-style optimal rotation from the SVD of the cross-covariance.
    cov = anchors_a.T.dot(anchors_b)
    u, _, vh = np.linalg.svd(cov)
    rot = u.dot(vh)
    if np.linalg.det(rot) < 0:
        vh[2, :] *= -1  # flip to avoid an improper rotation (reflection)
        rot = u.dot(vh)

    # Rotate the fragment about its centroid; leave all other atoms fixed.
    rotated_frag = rot.dot((frag_pts - centroid).T).T + centroid
    rotated_full = pts_full.copy()
    rotated_full[frag_idxs, :] = rotated_frag

    # Optimal one-to-one matching between original and rotated geometry.
    dist = scipy.spatial.distance.cdist(pts_full, rotated_full, 'euclidean')
    _, perm = scipy.optimize.linear_sum_assignment(dist)

    return perm
def inv_perm(perm):
    """Return the inverse of the permutation *perm* (1-D integer ndarray)."""
    inverse = np.empty(perm.size, perm.dtype)
    inverse[perm] = np.arange(perm.size)
    return inverse
| 29.914191
| 160
| 0.616339
|
1df70361b98cb23ca3c4396c93c48529e5fc5b95
| 288
|
py
|
Python
|
logiclibrary/storage.py
|
ld4apps/lda-serverlib
|
e76441e658c24d6cd2d7826f50e7b8c0dfc65350
|
[
"Apache-2.0"
] | 3
|
2015-07-08T12:35:08.000Z
|
2016-11-08T02:08:31.000Z
|
logiclibrary/storage.py
|
ld4apps/lda-serverlib
|
e76441e658c24d6cd2d7826f50e7b8c0dfc65350
|
[
"Apache-2.0"
] | null | null | null |
logiclibrary/storage.py
|
ld4apps/lda-serverlib
|
e76441e658c24d6cd2d7826f50e7b8c0dfc65350
|
[
"Apache-2.0"
] | 1
|
2015-02-23T17:49:55.000Z
|
2015-02-23T17:49:55.000Z
|
import os
import importlib

# The operation-primitives implementation is pluggable: its module name may
# be overridden through the OPERATION_PRIMITIVES environment variable.
# Otherwise we assume it has the standard name and is on the python path.
import_name = os.environ.get('OPERATION_PRIMITIVES', 'operation_primitives')
operation_primitives = importlib.import_module(import_name)
| 32
| 99
| 0.798611
|
5b31b2e81d0f39ce9fd8e1e0e5ff0bf9b4490673
| 1,659
|
py
|
Python
|
homeassistant/components/uptime/sensor.py
|
nickna/core
|
c682d5d5e430de52e3da7e06026cd8b4087e864f
|
[
"Apache-2.0"
] | 11
|
2018-02-16T15:35:47.000Z
|
2020-01-14T15:20:00.000Z
|
homeassistant/components/uptime/sensor.py
|
flexy2dd/core
|
1019ee22ff13e5f542e868179d791e6a0d87369a
|
[
"Apache-2.0"
] | 77
|
2020-07-16T16:43:09.000Z
|
2022-03-31T06:14:37.000Z
|
homeassistant/components/uptime/sensor.py
|
Vaarlion/core
|
f3de8b9f28de01abf72c0f5bb0b457eb1841f201
|
[
"Apache-2.0"
] | 6
|
2018-02-04T03:48:55.000Z
|
2022-01-24T20:37:04.000Z
|
"""Platform to retrieve uptime for Home Assistant."""
from __future__ import annotations
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import (
CONF_NAME,
CONF_UNIT_OF_MEASUREMENT,
DEVICE_CLASS_TIMESTAMP,
)
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
import homeassistant.util.dt as dt_util
DEFAULT_NAME = "Uptime"

# YAML platform schema: an optional friendly name, plus the deprecated
# unit_of_measurement option (still validated so legacy configs keep loading).
PLATFORM_SCHEMA = vol.All(
    cv.deprecated(CONF_UNIT_OF_MEASUREMENT),
    PLATFORM_SCHEMA.extend(
        {
            vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
            vol.Optional(CONF_UNIT_OF_MEASUREMENT, default="days"): vol.All(
                cv.string, vol.In(["minutes", "hours", "days", "seconds"])
            ),
        }
    ),
)
async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    async_add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the uptime sensor platform."""
    # Create the single uptime sensor and request an initial state update.
    sensor = UptimeSensor(config[CONF_NAME])
    async_add_entities([sensor], True)
class UptimeSensor(SensorEntity):
    """Representation of an uptime sensor."""

    def __init__(self, name: str) -> None:
        """Initialize the uptime sensor."""
        self._attr_name: str = name
        self._attr_device_class: str = DEVICE_CLASS_TIMESTAMP
        # No polling: the state is captured once at creation and never
        # refreshed afterwards.
        self._attr_should_poll: bool = False
        self._attr_state: str = dt_util.now().isoformat()
| 30.722222
| 76
| 0.717902
|
42b44ca16ad4a76ab7e8f539bed8f5f3ea7b82ab
| 235
|
py
|
Python
|
Exercicios/mundo1-exercicios-01-35/ex015.py
|
rafaelbarretomg/Curso-Python-3
|
7e772cbaf4c1e1bf7f1a9fb2925ec2e0eecf2998
|
[
"MIT"
] | null | null | null |
Exercicios/mundo1-exercicios-01-35/ex015.py
|
rafaelbarretomg/Curso-Python-3
|
7e772cbaf4c1e1bf7f1a9fb2925ec2e0eecf2998
|
[
"MIT"
] | null | null | null |
Exercicios/mundo1-exercicios-01-35/ex015.py
|
rafaelbarretomg/Curso-Python-3
|
7e772cbaf4c1e1bf7f1a9fb2925ec2e0eecf2998
|
[
"MIT"
] | null | null | null |
# Car rental: R$60.00 per rental day plus R$0.15 per kilometer driven.
DIARIA = 60
PRECO_KM = 0.15

dias = int(input('Quantos dias alugados? '))
km = float(input('Quantos Km rodados? '))

aluguel = dias * DIARIA + km * PRECO_KM
print('O aluguel foi de R${:.2f}'.format(aluguel))
| 39.166667
| 59
| 0.66383
|
09601087429e188e4f256039ab06e7a51b9d3570
| 14,779
|
py
|
Python
|
chia/consensus/multiprocess_validation.py
|
Heather-Network/heather-blockchain
|
75a37c6f54d98b5c36c5e8cf5b27c5ed9ae977fa
|
[
"Apache-2.0"
] | 1
|
2021-09-19T18:57:21.000Z
|
2021-09-19T18:57:21.000Z
|
chia/consensus/multiprocess_validation.py
|
Heather-Network/heather-blockchain
|
75a37c6f54d98b5c36c5e8cf5b27c5ed9ae977fa
|
[
"Apache-2.0"
] | null | null | null |
chia/consensus/multiprocess_validation.py
|
Heather-Network/heather-blockchain
|
75a37c6f54d98b5c36c5e8cf5b27c5ed9ae977fa
|
[
"Apache-2.0"
] | null | null | null |
import asyncio
import logging
import traceback
from concurrent.futures.process import ProcessPoolExecutor
from dataclasses import dataclass
from typing import Dict, List, Optional, Sequence, Tuple, Union, Callable
from chia.consensus.block_header_validation import validate_finished_header_block
from chia.consensus.block_record import BlockRecord
from chia.consensus.blockchain_interface import BlockchainInterface
from chia.consensus.constants import ConsensusConstants
from chia.consensus.cost_calculator import NPCResult
from chia.consensus.difficulty_adjustment import get_next_sub_slot_iters_and_difficulty
from chia.consensus.full_block_to_block_record import block_to_block_record
from chia.consensus.get_block_challenge import get_block_challenge
from chia.consensus.pot_iterations import calculate_iterations_quality, is_overflow_block
from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions
from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.blockchain_format.sub_epoch_summary import SubEpochSummary
from chia.types.full_block import FullBlock
from chia.types.generator_types import BlockGenerator
from chia.types.header_block import HeaderBlock
from chia.util.block_cache import BlockCache
from chia.util.errors import Err
from chia.util.generator_tools import get_block_header, tx_removals_and_additions
from chia.util.ints import uint16, uint64, uint32
from chia.util.streamable import Streamable, dataclass_from_dict, streamable
# Module logger. The previous version hard-coded a typo'd name
# ("heather.consensus.multiprocesss_validationj"); use the canonical module
# path so records can be filtered per-module as everywhere else.
log = logging.getLogger(__name__)
@dataclass(frozen=True)
@streamable
class PreValidationResult(Streamable):
    """Serializable outcome of pre-validating a single block header."""

    error: Optional[uint16]  # Err code value on failure, None on success
    required_iters: Optional[uint64]  # Iff error is None
    npc_result: Optional[NPCResult]  # Iff error is None and block is a transaction block
def batch_pre_validate_blocks(
    constants_dict: Dict,
    blocks_pickled: Dict[bytes, bytes],
    full_blocks_pickled: Optional[List[bytes]],
    header_blocks_pickled: Optional[List[bytes]],
    prev_transaction_generators: List[Optional[bytes]],
    npc_results: Dict[uint32, bytes],
    check_filter: bool,
    expected_difficulty: List[uint64],
    expected_sub_slot_iters: List[uint64],
) -> List[bytes]:
    """Pre-validate a batch of blocks in a worker process.

    Exactly one of ``full_blocks_pickled`` / ``header_blocks_pickled`` may be
    provided. Every argument is passed in serialized form because this runs
    in a ProcessPoolExecutor worker. Returns one serialized
    ``PreValidationResult`` per input block; per-block exceptions are caught
    and reported as ``Err.UNKNOWN`` results rather than failing the batch.
    """
    blocks = {}
    for k, v in blocks_pickled.items():
        blocks[k] = BlockRecord.from_bytes(v)
    results: List[PreValidationResult] = []
    constants: ConsensusConstants = dataclass_from_dict(ConsensusConstants, constants_dict)
    if full_blocks_pickled is not None and header_blocks_pickled is not None:
        # BUG FIX: this was `assert ValueError(...)`, which is always truthy
        # and therefore never fails; the intent is to reject ambiguous input.
        raise ValueError("Only one should be passed here")
    if full_blocks_pickled is not None:
        for i in range(len(full_blocks_pickled)):
            try:
                block: FullBlock = FullBlock.from_bytes(full_blocks_pickled[i])
                tx_additions: List[Coin] = []
                removals: List[bytes32] = []
                npc_result: Optional[NPCResult] = None
                if block.height in npc_results:
                    # A pre-computed NPC result was handed to us; reuse it.
                    npc_result = NPCResult.from_bytes(npc_results[block.height])
                    assert npc_result is not None
                    if npc_result.npc_list is not None:
                        removals, tx_additions = tx_removals_and_additions(npc_result.npc_list)
                    else:
                        removals, tx_additions = [], []
                if block.transactions_generator is not None and npc_result is None:
                    # No cached result: run the generator program (in safe
                    # mode) to derive removals and additions.
                    prev_generator_bytes = prev_transaction_generators[i]
                    assert prev_generator_bytes is not None
                    assert block.transactions_info is not None
                    block_generator: BlockGenerator = BlockGenerator.from_bytes(prev_generator_bytes)
                    assert block_generator.program == block.transactions_generator
                    npc_result = get_name_puzzle_conditions(
                        block_generator,
                        min(constants.MAX_BLOCK_COST_CLVM, block.transactions_info.cost),
                        cost_per_byte=constants.COST_PER_BYTE,
                        safe_mode=True,
                    )
                    removals, tx_additions = tx_removals_and_additions(npc_result.npc_list)

                header_block = get_block_header(block, tx_additions, removals)
                required_iters, error = validate_finished_header_block(
                    constants,
                    BlockCache(blocks),
                    header_block,
                    check_filter,
                    expected_difficulty[i],
                    expected_sub_slot_iters[i],
                )
                error_int: Optional[uint16] = None
                if error is not None:
                    error_int = uint16(error.code.value)
                results.append(PreValidationResult(error_int, required_iters, npc_result))
            except Exception:
                # Never let one bad block kill the batch; report it as unknown.
                error_stack = traceback.format_exc()
                log.error(f"Exception: {error_stack}")
                results.append(PreValidationResult(uint16(Err.UNKNOWN.value), None, None))
    elif header_blocks_pickled is not None:
        for i in range(len(header_blocks_pickled)):
            try:
                header_block = HeaderBlock.from_bytes(header_blocks_pickled[i])
                required_iters, error = validate_finished_header_block(
                    constants,
                    BlockCache(blocks),
                    header_block,
                    check_filter,
                    expected_difficulty[i],
                    expected_sub_slot_iters[i],
                )
                error_int = None
                if error is not None:
                    error_int = uint16(error.code.value)
                results.append(PreValidationResult(error_int, required_iters, None))
            except Exception:
                error_stack = traceback.format_exc()
                log.error(f"Exception: {error_stack}")
                results.append(PreValidationResult(uint16(Err.UNKNOWN.value), None, None))
    return [bytes(r) for r in results]
async def pre_validate_blocks_multiprocessing(
    constants: ConsensusConstants,
    constants_json: Dict,
    block_records: BlockchainInterface,
    blocks: Sequence[Union[FullBlock, HeaderBlock]],
    pool: ProcessPoolExecutor,
    check_filter: bool,
    npc_results: Dict[uint32, NPCResult],
    get_block_generator: Optional[Callable],
    batch_size: int,
    wp_summaries: Optional[List[SubEpochSummary]] = None,
) -> Optional[List[PreValidationResult]]:
    """
    This method must be called under the blockchain lock
    If all the full blocks pass pre-validation, (only validates header), returns the list of required iters.
    if any validation issue occurs, returns False.
    Args:
        check_filter:
        constants_json:
        pool:
        constants:
        block_records:
        blocks: list of full blocks to validate (must be connected to current chain)
        npc_results
        get_block_generator
    """
    prev_b: Optional[BlockRecord] = None
    # Collects all the recent blocks (up to the previous sub-epoch)
    recent_blocks: Dict[bytes32, BlockRecord] = {}
    # Smaller working set used when a batch contains no finished sub-slots.
    recent_blocks_compressed: Dict[bytes32, BlockRecord] = {}
    num_sub_slots_found = 0
    num_blocks_seen = 0
    if blocks[0].height > 0:
        if not block_records.contains_block(blocks[0].prev_header_hash):
            return [PreValidationResult(uint16(Err.INVALID_PREV_BLOCK_HASH.value), None, None)]
        curr = block_records.block_record(blocks[0].prev_header_hash)
        num_sub_slots_to_look_for = 3 if curr.overflow else 2
        # Walk the chain backwards until we have the previous sub-epoch summary,
        # enough transaction-block timestamps, and enough finished sub-slots for
        # header validation of the first block in `blocks`.
        while (
            curr.sub_epoch_summary_included is None
            or num_blocks_seen < constants.NUMBER_OF_TIMESTAMPS
            or num_sub_slots_found < num_sub_slots_to_look_for
        ) and curr.height > 0:
            if num_blocks_seen < constants.NUMBER_OF_TIMESTAMPS or num_sub_slots_found < num_sub_slots_to_look_for:
                recent_blocks_compressed[curr.header_hash] = curr
            if curr.first_in_sub_slot:
                assert curr.finished_challenge_slot_hashes is not None
                num_sub_slots_found += len(curr.finished_challenge_slot_hashes)
            recent_blocks[curr.header_hash] = curr
            if curr.is_transaction_block:
                num_blocks_seen += 1
            curr = block_records.block_record(curr.prev_hash)
        recent_blocks[curr.header_hash] = curr
        recent_blocks_compressed[curr.header_hash] = curr
    # Remember which headers already existed so any records temporarily added
    # below can be rolled back on failure.
    block_record_was_present = []
    for block in blocks:
        block_record_was_present.append(block_records.contains_block(block.header_hash))
    diff_ssis: List[Tuple[uint64, uint64]] = []
    for block in blocks:
        if block.height != 0:
            assert block_records.contains_block(block.prev_header_hash)
            if prev_b is None:
                prev_b = block_records.block_record(block.prev_header_hash)
        sub_slot_iters, difficulty = get_next_sub_slot_iters_and_difficulty(
            constants, len(block.finished_sub_slots) > 0, prev_b, block_records
        )
        overflow = is_overflow_block(constants, block.reward_chain_block.signage_point_index)
        challenge = get_block_challenge(constants, block, BlockCache(recent_blocks), prev_b is None, overflow, False)
        if block.reward_chain_block.challenge_chain_sp_vdf is None:
            # Edge case: no challenge-chain signage-point VDF; use the challenge itself.
            cc_sp_hash: bytes32 = challenge
        else:
            cc_sp_hash = block.reward_chain_block.challenge_chain_sp_vdf.output.get_hash()
        q_str: Optional[bytes32] = block.reward_chain_block.proof_of_space.verify_and_get_quality_string(
            constants, challenge, cc_sp_hash
        )
        if q_str is None:
            # Invalid proof of space: roll back any block records we added
            # temporarily, then signal failure.
            for i, block_i in enumerate(blocks):
                if not block_record_was_present[i] and block_records.contains_block(block_i.header_hash):
                    block_records.remove_block_record(block_i.header_hash)
            return None
        required_iters: uint64 = calculate_iterations_quality(
            constants.DIFFICULTY_CONSTANT_FACTOR,
            q_str,
            block.reward_chain_block.proof_of_space.size,
            difficulty,
            cc_sp_hash,
        )
        block_rec = block_to_block_record(
            constants,
            block_records,
            required_iters,
            block,
            None,
        )
        if block_rec.sub_epoch_summary_included is not None and wp_summaries is not None:
            # Cross-check against the weight-proof's sub-epoch summary list.
            idx = int(block.height / constants.SUB_EPOCH_BLOCKS) - 1
            next_ses = wp_summaries[idx]
            if not block_rec.sub_epoch_summary_included.get_hash() == next_ses.get_hash():
                log.error("sub_epoch_summary does not match wp sub_epoch_summary list")
                return None
        # Makes sure to not override the valid blocks already in block_records
        if not block_records.contains_block(block_rec.header_hash):
            block_records.add_block_record(block_rec)  # Temporarily add block to dict
            recent_blocks[block_rec.header_hash] = block_rec
            recent_blocks_compressed[block_rec.header_hash] = block_rec
        else:
            recent_blocks[block_rec.header_hash] = block_records.block_record(block_rec.header_hash)
            recent_blocks_compressed[block_rec.header_hash] = block_records.block_record(block_rec.header_hash)
        prev_b = block_rec
        diff_ssis.append((difficulty, sub_slot_iters))
    block_dict: Dict[bytes32, Union[FullBlock, HeaderBlock]] = {}
    for i, block in enumerate(blocks):
        block_dict[block.header_hash] = block
        if not block_record_was_present[i]:
            # Undo the temporary additions made above.
            block_records.remove_block_record(block.header_hash)
    recent_sb_compressed_pickled = {bytes(k): bytes(v) for k, v in recent_blocks_compressed.items()}
    npc_results_pickled = {}
    for k, v in npc_results.items():
        npc_results_pickled[k] = bytes(v)
    futures = []
    # Pool of workers to validate blocks concurrently
    for i in range(0, len(blocks), batch_size):
        end_i = min(i + batch_size, len(blocks))
        blocks_to_validate = blocks[i:end_i]
        # Batches containing finished sub-slots need the full recent-blocks
        # set; otherwise the compressed set is sufficient and cheaper to ship.
        if any([len(block.finished_sub_slots) > 0 for block in blocks_to_validate]):
            final_pickled = {bytes(k): bytes(v) for k, v in recent_blocks.items()}
        else:
            final_pickled = recent_sb_compressed_pickled
        b_pickled: Optional[List[bytes]] = None
        hb_pickled: Optional[List[bytes]] = None
        previous_generators: List[Optional[bytes]] = []
        for block in blocks_to_validate:
            # We ONLY add blocks which are in the past, based on header hashes (which are validated later) to the
            # prev blocks dict. This is important since these blocks are assumed to be valid and are used as previous
            # generator references
            prev_blocks_dict: Dict[uint32, Union[FullBlock, HeaderBlock]] = {}
            curr_b: Union[FullBlock, HeaderBlock] = block
            while curr_b.prev_header_hash in block_dict:
                curr_b = block_dict[curr_b.prev_header_hash]
                prev_blocks_dict[curr_b.header_hash] = curr_b
            if isinstance(block, FullBlock):
                assert get_block_generator is not None
                if b_pickled is None:
                    b_pickled = []
                b_pickled.append(bytes(block))
                try:
                    block_generator: Optional[BlockGenerator] = await get_block_generator(block, prev_blocks_dict)
                except ValueError:
                    return None
                if block_generator is not None:
                    previous_generators.append(bytes(block_generator))
                else:
                    previous_generators.append(None)
            else:
                if hb_pickled is None:
                    hb_pickled = []
                hb_pickled.append(bytes(block))
        futures.append(
            asyncio.get_running_loop().run_in_executor(
                pool,
                batch_pre_validate_blocks,
                constants_json,
                final_pickled,
                b_pickled,
                hb_pickled,
                previous_generators,
                npc_results_pickled,
                check_filter,
                [diff_ssis[j][0] for j in range(i, end_i)],
                [diff_ssis[j][1] for j in range(i, end_i)],
            )
        )
    # Collect all results into one flat list
    return [
        PreValidationResult.from_bytes(result)
        for batch_result in (await asyncio.gather(*futures))
        for result in batch_result
    ]
| 46.329154
| 117
| 0.660194
|
1c699cb9b740f7c3cce4823b9ca663fad84601f3
| 107,581
|
py
|
Python
|
fhir/resources/fhirtypes.py
|
iatechicken/fhir.resources
|
8ccb21aaa00755c6d230522bd7ddb655155b4bcb
|
[
"BSD-3-Clause"
] | null | null | null |
fhir/resources/fhirtypes.py
|
iatechicken/fhir.resources
|
8ccb21aaa00755c6d230522bd7ddb655155b4bcb
|
[
"BSD-3-Clause"
] | null | null | null |
fhir/resources/fhirtypes.py
|
iatechicken/fhir.resources
|
8ccb21aaa00755c6d230522bd7ddb655155b4bcb
|
[
"BSD-3-Clause"
] | null | null | null |
# _*_ coding: utf-8 _*_
import datetime
import re
from email.utils import formataddr, parseaddr
from typing import TYPE_CHECKING, Any, Dict, Optional, Pattern, Union
from uuid import UUID
from pydantic import AnyUrl
from pydantic.errors import DateError, DateTimeError, TimeError
from pydantic.main import load_str_bytes
from pydantic.networks import validate_email
from pydantic.types import (
ConstrainedBytes,
ConstrainedDecimal,
ConstrainedInt,
ConstrainedStr,
)
from pydantic.validators import bool_validator, parse_date, parse_datetime, parse_time
from .fhirabstractmodel import FHIRAbstractModel
from .fhirtypesvalidators import run_validator_for_fhir_type
if TYPE_CHECKING:
from pydantic.types import CallableGenerator
from pydantic.fields import ModelField
from pydantic import BaseConfig
__author__ = "Md Nazrul Islam<email2nazrul@gmail.com>"
# Splits a FHIR date string into named year/month/day groups; month and day
# are optional, so partial dates ("2018", "2018-06") also match.
FHIR_DATE_PARTS = re.compile(r"(?P<year>\d{4})(-(?P<month>\d{2}))?(-(?P<day>\d{2}))?$")
# Names of every FHIR primitive datatype; used e.g. by Url.validate to accept
# a primitive type name where a valueUrl may appear.
FHIR_PRIMITIVES = [
    "boolean",
    "string",
    "base64Binary",
    "code",
    "id",
    "decimal",
    "integer",
    "unsignedInt",
    "positiveInt",
    "uri",
    "oid",
    "uuid",
    "canonical",
    "url",
    "markdown",
    "xhtml",
    "date",
    "dateTime",
    "instant",
    "time",
]
class Primitive:
    """FHIR Primitive Data Type Base Class"""
    # FHIR specification release these types target.
    __fhir_release__: str = "R4"
    # Name the FHIR spec uses for this type; None on the base class.
    __visit_name__: Optional[str] = None
    # Optional value-validation pattern supplied by subclasses.
    regex: Optional[Pattern[str]] = None
    @classmethod
    def is_primitive(cls) -> bool:
        """Primitive types always report True (cf. AbstractType, which reports False)."""
        return True
    @classmethod
    def fhir_type_name(cls) -> Optional[str]:
        """Return the FHIR spec name of this type (``__visit_name__``)."""
        return cls.__visit_name__
# At type-check time ``Boolean`` is plain ``bool``; at runtime it is an ``int``
# subclass (bool cannot be subclassed) validated by pydantic's bool validator.
if TYPE_CHECKING:
    Boolean = bool
else:
    class Boolean(int, Primitive):
        """true | false"""
        regex = re.compile("true|false")
        __visit_name__ = "boolean"
        @classmethod
        def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
            # Advertise the type as "boolean" in the generated JSON schema.
            field_schema.update(type="boolean")
        @classmethod
        def __get_validators__(cls) -> "CallableGenerator":
            # Delegate value coercion to pydantic's standard bool validator.
            yield bool_validator
class String(ConstrainedStr, Primitive):
    """A sequence of Unicode characters
    Note that strings SHALL NOT exceed 1MB (1024*1024 characters) in size.
    Strings SHOULD not contain Unicode character points below 32, except for
    u0009 (horizontal tab), u0010 (carriage return) and u0013 (line feed).
    Leading and Trailing whitespace is allowed, but SHOULD be removed when using
    the XML format. Note: This means that a string that consists only of whitespace
    could be trimmed to nothing, which would be treated as an invalid element value.
    Therefore strings SHOULD always contain non-whitespace content."""
    # NOTE(review): the pattern permits whitespace-only strings even though the
    # docstring discourages them -- TODO confirm this is intentional upstream.
    regex = re.compile(r"[ \r\n\t\S]+")
    __visit_name__ = "string"
class Base64Binary(ConstrainedBytes, Primitive):
    """A stream of bytes, base64 encoded (RFC 4648 )"""
    # NOTE(review): '/' is absent from the base64 character class here -- TODO
    # confirm against the RFC 4648 alphabet.
    regex = re.compile(r"(\s*([0-9a-zA-Z+=]){4}\s*)+")
    __visit_name__ = "base64Binary"
class Code(ConstrainedStr, Primitive):
    """Indicates that the value is taken from a set of controlled
    strings defined elsewhere (see Using codes for further discussion).
    Technically, a code is restricted to a string which has at least one
    character and no leading or trailing whitespace, and where there is
    no whitespace other than single spaces in the contents"""
    # Pattern enforces: no leading/trailing whitespace, single separators only.
    regex = re.compile(r"[^\s]+(\s[^\s]+)*")
    __visit_name__ = "code"
class Id(ConstrainedStr, Primitive):
    """Any combination of upper- or lower-case ASCII letters
    ('A'..'Z', and 'a'..'z', numerals ('0'..'9'), '-' and '.',
    with a length limit of 64 characters.
    (This might be an integer, an un-prefixed OID, UUID or any other identifier
    pattern that meets these constraints.)
    """
    regex = re.compile(r"[A-Za-z0-9\-.]{1,64}")
    # Length bounds are enforced by ConstrainedStr in addition to the pattern.
    min_length = 1
    max_length = 64
    __visit_name__ = "id"
class Decimal(ConstrainedDecimal, Primitive):
    """Rational numbers that have a decimal representation.
    See below about the precision of the number"""
    # Accepts optional sign, no leading zeros, and scientific notation.
    regex = re.compile(r"-?(0|[1-9][0-9]*)(\.[0-9]+)?([eE][+-]?[0-9]+)?")
    __visit_name__ = "decimal"
class Integer(ConstrainedInt, Primitive):
    """A signed integer in the range −2,147,483,648..2,147,483,647 (32-bit;
    for larger values, use decimal)"""
    # Zero, or a signed value with no leading zeros.
    regex = re.compile(r"[0]|[-+]?[1-9][0-9]*")
    __visit_name__ = "integer"
class UnsignedInt(ConstrainedInt, Primitive):
    """Any non-negative integer in the range 0..2,147,483,647"""
    regex = re.compile(r"[0]|([1-9][0-9]*)")
    __visit_name__ = "unsignedInt"
    # ConstrainedInt bound: value must be >= 0.
    ge = 0
class PositiveInt(ConstrainedInt, Primitive):
    """Any positive integer in the range 1..2,147,483,647"""
    regex = re.compile(r"\+?[1-9][0-9]*")
    __visit_name__ = "positiveInt"
    # ConstrainedInt bound: value must be > 0.
    gt = 0
class Uri(ConstrainedStr, Primitive):
    """A Uniform Resource Identifier Reference (RFC 3986 ).
    Note: URIs are case sensitive.
    For UUID (urn:uuid:53fefa32-fcbb-4ff8-8a92-55ee120877b7)
    use all lowercase xs:anyURI A JSON string - a URI
    Regex: \\S* (This regex is very permissive, but URIs must be valid.
    Implementers are welcome to use more specific regex statements
    for a URI in specific contexts)
    URIs can be absolute or relative, and may have an optional fragment identifier
    This data type can be bound to a ValueSet"""
    __visit_name__ = "uri"
    # Deliberately permissive (any non-whitespace run), per the FHIR spec note above.
    regex = re.compile(r"\S*")
class Oid(ConstrainedStr, Primitive):
    """An OID represented as a URI (RFC 3001 ); e.g. urn:oid:1.2.3.4.5"""
    __visit_name__ = "oid"
    # First arc restricted to 0-2, as required for ISO object identifiers.
    regex = re.compile(r"urn:oid:[0-2](\.(0|[1-9][0-9]*))+")
class Uuid(UUID, Primitive):
    """A UUID (aka GUID) represented as a URI (RFC 4122 );
    e.g. urn:uuid:c757873d-ec9a-4326-a141-556f43239520"""
    __visit_name__ = "uuid"
    # regex is None: no pattern validation is applied for UUID values here.
    regex = None
class Canonical(Uri):
    """A URI that refers to a resource by its canonical URL (resources with a url property).
    The canonical type differs from a uri in that it has special meaning in this specification,
    and in that it may have a version appended, separated by a vertical bar (|).
    Note that the type canonical is not used for the actual canonical URLs that are
    the target of these references, but for the URIs that refer to them, and may have
    the version suffix in them. Like other URIs, elements of type canonical may also have
    #fragment references"""
    # Inherits Uri's permissive regex; only the spec type name differs.
    __visit_name__ = "canonical"
class Url(AnyUrl, Primitive):
    """A Uniform Resource Locator (RFC 1738 ).
    Note URLs are accessed directly using the specified protocol.
    Common URL protocols are http{s}:, ftp:, mailto: and mllp:,
    though many others are defined"""
    __visit_name__ = "url"
    @classmethod
    def validate(  # type: ignore
        cls, value: str, field: "ModelField", config: "BaseConfig"
    ) -> Union["AnyUrl", str]:
        """Validate ``value``, special-casing the mailto/mllp/llp schemes and
        bare primitive type names before delegating to pydantic's URL check."""
        if value.startswith("mailto:"):
            # Validate the address part with pydantic's email validator and
            # re-attach any display name, e.g. "Jane <jane@example.com>".
            prefix = value[0:7]
            address = value[7:]
            display_name = parseaddr(address)[0]
            parsed_name, parsed_email = validate_email(address)
            if display_name:
                parsed_email = formataddr((parsed_name, parsed_email))
            return prefix + parsed_email
        if value.startswith(("mllp:", "llp:")):
            # xxx: find validation
            return value
        if value in FHIR_PRIMITIVES:
            # Extensions may contain a valueUrl for a primitive FHIR type
            return value
        return AnyUrl.validate(value, field, config)
class Markdown(ConstrainedStr, Primitive):
    """A FHIR string (see above) that may contain markdown syntax for optional processing
    by a markdown presentation engine, in the GFM extension of CommonMark format (see below)"""
    __visit_name__ = "markdown"
    # Accepts any text, including leading whitespace and empty content.
    regex = re.compile(r"\s*(\S|\s)*")
class Xhtml(ConstrainedStr, Primitive):
    """XHTML narrative content; no pattern constraint is applied here."""
    __visit_name__ = "xhtml"
class Date(datetime.date, Primitive):
    """A date, or partial date (e.g. just year or year + month)
    as used in human communication. The format is YYYY, YYYY-MM, or YYYY-MM-DD,
    e.g. 2018, 1973-06, or 1905-08-23.
    There SHALL be no time zone. Dates SHALL be valid dates"""
    regex = re.compile(
        r"([0-9]([0-9]([0-9][1-9]|[1-9]0)|[1-9]00)|"
        r"[1-9]000)(-(0[1-9]|1[0-2])(-(0[1-9]|[1-2]"
        r"[0-9]|3[0-1]))?)?"
    )
    __visit_name__ = "date"
    @classmethod
    def __get_validators__(cls) -> "CallableGenerator":
        yield cls.validate
    @classmethod
    def validate(
        cls, value: Union[datetime.date, str, bytes, int, float]
    ) -> Union[datetime.date, str]:
        """Validate a full or partial FHIR date.
        Partial dates (YYYY or YYYY-MM) are returned as the original string so
        precision is not lost; complete dates are parsed via pydantic's
        ``parse_date``. Raises ``DateError`` on malformed input.
        """
        if not isinstance(value, str):
            # default handler
            return parse_date(value)
        match = FHIR_DATE_PARTS.match(value)
        if not match:
            if not cls.regex.match(value):
                raise DateError()
        elif not match.groupdict().get("day"):
            # Partial date: only the month range is checked here; the value is
            # kept as a string so the missing precision stays observable.
            if match.groupdict().get("month") and int(match.groupdict()["month"]) > 12:
                raise DateError()
            # we keep original
            return value
        return parse_date(value)
class DateTime(datetime.datetime, Primitive):
    """A date, date-time or partial date (e.g. just year or year + month) as used
    in human communication. The format is YYYY, YYYY-MM, YYYY-MM-DD or
    YYYY-MM-DDThh:mm:ss+zz:zz, e.g. 2018, 1973-06, 1905-08-23,
    2015-02-07T13:28:17-05:00 or 2017-01-01T00:00:00.000Z.
    If hours and minutes are specified, a time zone SHALL be populated.
    Seconds must be provided due to schema type constraints but may be
    zero-filled and may be ignored at receiver discretion.
    Dates SHALL be valid dates. The time "24:00" is not allowed.
    Leap Seconds are allowed - see below"""
    regex = re.compile(
        r"([0-9]([0-9]([0-9][1-9]|[1-9]0)|[1-9]00)|"
        r"[1-9]000)(-(0[1-9]|1[0-2])(-(0[1-9]|[1-2][0-9]|"
        r"3[0-1])(T([01][0-9]|2[0-3]):[0-5][0-9]:([0-5][0-9]|"
        r"60)(\.[0-9]+)?(Z|([+\-])((0[0-9]|"
        r"1[0-3]):[0-5][0-9]|14:00)))?)?)?"
    )
    __visit_name__ = "dateTime"
    @classmethod
    def __get_validators__(cls) -> "CallableGenerator":
        yield cls.validate
    @classmethod
    def validate(
        cls, value: Union[datetime.date, datetime.datetime, str, bytes, int, float]
    ) -> Union[datetime.datetime, datetime.date, str]:
        """Validate a FHIR dateTime.
        Complete dates parse to ``datetime.date``; full timestamps parse via
        pydantic's ``parse_datetime``; partial dates (YYYY, YYYY-MM) are kept
        as strings. Raises ``DateError``/``DateTimeError`` on malformed input.
        """
        if isinstance(value, datetime.date):
            return value
        if not isinstance(value, str):
            # default handler
            return parse_datetime(value)
        match = FHIR_DATE_PARTS.match(value)
        if match:
            if (
                match.groupdict().get("year")
                and match.groupdict().get("month")
                and match.groupdict().get("day")
            ):
                return parse_date(value)
            elif match.groupdict().get("year") and match.groupdict().get("month"):
                if int(match.groupdict()["month"]) > 12:
                    raise DateError()
            # we don't want to lose actual information, so keep as string
            return value
        if not cls.regex.match(value):
            raise DateTimeError()
        return parse_datetime(value)
class Instant(datetime.datetime, Primitive):
    """An instant in time in the format YYYY-MM-DDThh:mm:ss.sss+zz:zz
    (e.g. 2015-02-07T13:28:17.239+02:00 or 2017-01-01T00:00:00Z).
    The time SHALL specified at least to the second and SHALL include a time zone.
    Note: This is intended for when precisely observed times are required
    (typically system logs etc.), and not human-reported times - for those,
    use date or dateTime (which can be as precise as instant,
    but is not required to be). instant is a more constrained dateTime
    Note: This type is for system times, not human times (see date and dateTime below)."""
    regex = re.compile(
        r"([0-9]([0-9]([0-9][1-9]|[1-9]0)|[1-9]00)|"
        r"[1-9]000)-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|"
        r"3[0-1])T([01][0-9]|2[0-3]):[0-5][0-9]:([0-5][0-9]"
        r"|60)(\.[0-9]+)?(Z|([+\-])((0[0-9]|"
        r"1[0-3]):[0-5][0-9]|14:00))"
    )
    __visit_name__ = "instant"
    @classmethod
    def __get_validators__(cls) -> "CallableGenerator":
        yield cls.validate
    @classmethod
    def validate(cls, value):
        """Reject strings that do not match the instant grammar, then delegate
        to pydantic's datetime parser for the actual conversion."""
        if isinstance(value, str) and not cls.regex.match(value):
            raise DateTimeError()
        return parse_datetime(value)
class Time(datetime.time, Primitive):
    """A time during the day, in the format hh:mm:ss.
    There is no date specified. Seconds must be provided due
    to schema type constraints but may be zero-filled and may
    be ignored at receiver discretion.
    The time "24:00" SHALL NOT be used. A time zone SHALL NOT be present.
    Times can be converted to a Duration since midnight."""
    regex = re.compile(r"([01][0-9]|2[0-3]):[0-5][0-9]:([0-5][0-9]|60)(\.[0-9]+)?")
    __visit_name__ = "time"
    @classmethod
    def __get_validators__(cls) -> "CallableGenerator":
        yield cls.validate
    @classmethod
    def validate(cls, value):
        """Reject strings that do not match the hh:mm:ss grammar, then delegate
        to pydantic's time parser for the actual conversion."""
        if isinstance(value, str) and not cls.regex.match(value):
            raise TimeError()
        return parse_time(value)
def get_fhir_type_class(model_name):
    """Return the ``<model_name>Type`` class defined in this module.

    :param model_name: FHIR resource/element name, e.g. ``"Patient"``.
    :raises LookupError: if no matching ``<model_name>Type`` class exists.
    """
    try:
        return globals()[model_name + "Type"]
    except KeyError as exc:
        # Fix: the original message read "doesnt found"; LookupError is kept
        # for backward compatibility with existing callers.
        raise LookupError(f"'{__name__}.{model_name}Type' was not found.") from exc
class AbstractType(dict):
    """Base class for the generated FHIR complex-type placeholders below.
    Subclasses only pin ``__resource_type__``; pydantic validation is resolved
    by naming convention from the ``fhirtypesvalidators`` module.
    """
    __fhir_release__: str = "R4"
    __resource_type__: str = ... # type: ignore
    @classmethod
    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
        # Expose the concrete resource type name in the generated JSON schema.
        field_schema.update(type=cls.__resource_type__)
    @classmethod
    def __get_validators__(cls) -> "CallableGenerator":
        from . import fhirtypesvalidators
        # Resolve the validator function by convention, e.g.
        # "Account" -> fhirtypesvalidators.account_validator.
        yield getattr(fhirtypesvalidators, cls.__resource_type__.lower() + "_validator")
    @classmethod
    def is_primitive(cls) -> bool:
        """Complex types always report False (cf. Primitive, which reports True)."""
        return False
    @classmethod
    def fhir_type_name(cls) -> str:
        """Return the FHIR resource/element name of this type."""
        return cls.__resource_type__
class FHIRPrimitiveExtensionType(AbstractType):
    """Placeholder type for extensions attached to primitive FHIR values."""
    __resource_type__ = "FHIRPrimitiveExtension"
class AbstractBaseType(dict):
    """Base for polymorphic FHIR types (Element/Resource) whose concrete type
    is discovered from the payload's ``resourceType`` field at validation time.
    """
    __fhir_release__: str = "R4"
    __resource_type__: str = ... # type: ignore
    @classmethod
    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
        field_schema.update(type=cls.__resource_type__)
    @classmethod
    def __get_validators__(cls) -> "CallableGenerator":
        yield cls.validate
    @classmethod
    def validate(cls, v, values, config, field):
        """Dispatch validation to the concrete type named by ``resourceType``,
        falling back to this class's own validator when it is absent or equal
        to ``__resource_type__``."""
        # Determine the declared resourceType from raw JSON, a model instance,
        # or a plain dict.
        if isinstance(v, (bytes, str)):
            input_data = load_str_bytes(v)
            resource_type = input_data.get("resourceType", None)
        elif isinstance(v, FHIRAbstractModel):
            resource_type = v.resource_type
        else:
            resource_type = v.get("resourceType", None)
        if resource_type is None or resource_type == cls.__resource_type__:
            from . import fhirtypesvalidators
            v = getattr(
                fhirtypesvalidators, cls.__resource_type__.lower() + "_validator"
            )(v)
            return v
        # A more specific resourceType was supplied: look up its *Type class
        # and validate against that instead.
        type_class = get_fhir_type_class(resource_type)
        v = run_validator_for_fhir_type(type_class, v, values, config, field)
        return v
    @classmethod
    def is_primitive(cls) -> bool:
        """Always False -- this is a complex (non-primitive) type."""
        return False
    @classmethod
    def fhir_type_name(cls) -> str:
        """Return the FHIR base type name ("Element" or "Resource")."""
        return cls.__resource_type__
class ElementType(AbstractBaseType):
    """Polymorphic base Element type; accepts any Element subtype payload."""
    __resource_type__ = "Element"
class ResourceType(AbstractBaseType):
    """Polymorphic base Resource type; accepts any Resource subtype payload."""
    __resource_type__ = "Resource"
class AccountType(AbstractType):
__resource_type__ = "Account"
class AccountCoverageType(AbstractType):
__resource_type__ = "AccountCoverage"
class AccountGuarantorType(AbstractType):
__resource_type__ = "AccountGuarantor"
class ActivityDefinitionType(AbstractType):
__resource_type__ = "ActivityDefinition"
class ActivityDefinitionDynamicValueType(AbstractType):
__resource_type__ = "ActivityDefinitionDynamicValue"
class ActivityDefinitionParticipantType(AbstractType):
__resource_type__ = "ActivityDefinitionParticipant"
class AddressType(AbstractType):
__resource_type__ = "Address"
class AdverseEventType(AbstractType):
__resource_type__ = "AdverseEvent"
class AdverseEventSuspectEntityType(AbstractType):
__resource_type__ = "AdverseEventSuspectEntity"
class AdverseEventSuspectEntityCausalityType(AbstractType):
__resource_type__ = "AdverseEventSuspectEntityCausality"
class AgeType(AbstractType):
__resource_type__ = "Age"
class AllergyIntoleranceType(AbstractType):
__resource_type__ = "AllergyIntolerance"
class AllergyIntoleranceReactionType(AbstractType):
__resource_type__ = "AllergyIntoleranceReaction"
class AnnotationType(AbstractType):
__resource_type__ = "Annotation"
class AppointmentType(AbstractType):
__resource_type__ = "Appointment"
class AppointmentParticipantType(AbstractType):
__resource_type__ = "AppointmentParticipant"
class AppointmentResponseType(AbstractType):
__resource_type__ = "AppointmentResponse"
class AttachmentType(AbstractType):
__resource_type__ = "Attachment"
class AuditEventType(AbstractType):
__resource_type__ = "AuditEvent"
class AuditEventAgentType(AbstractType):
__resource_type__ = "AuditEventAgent"
class AuditEventAgentNetworkType(AbstractType):
__resource_type__ = "AuditEventAgentNetwork"
class AuditEventEntityType(AbstractType):
__resource_type__ = "AuditEventEntity"
class AuditEventEntityDetailType(AbstractType):
__resource_type__ = "AuditEventEntityDetail"
class AuditEventSourceType(AbstractType):
__resource_type__ = "AuditEventSource"
class BackboneElementType(AbstractType):
__resource_type__ = "BackboneElement"
class BasicType(AbstractType):
__resource_type__ = "Basic"
class BinaryType(AbstractType):
__resource_type__ = "Binary"
class BiologicallyDerivedProductType(AbstractType):
__resource_type__ = "BiologicallyDerivedProduct"
class BiologicallyDerivedProductCollectionType(AbstractType):
__resource_type__ = "BiologicallyDerivedProductCollection"
class BiologicallyDerivedProductManipulationType(AbstractType):
__resource_type__ = "BiologicallyDerivedProductManipulation"
class BiologicallyDerivedProductProcessingType(AbstractType):
__resource_type__ = "BiologicallyDerivedProductProcessing"
class BiologicallyDerivedProductStorageType(AbstractType):
__resource_type__ = "BiologicallyDerivedProductStorage"
class BodyStructureType(AbstractType):
__resource_type__ = "BodyStructure"
class BundleType(AbstractType):
__resource_type__ = "Bundle"
class BundleEntryType(AbstractType):
__resource_type__ = "BundleEntry"
class BundleEntryRequestType(AbstractType):
__resource_type__ = "BundleEntryRequest"
class BundleEntryResponseType(AbstractType):
__resource_type__ = "BundleEntryResponse"
class BundleEntrySearchType(AbstractType):
__resource_type__ = "BundleEntrySearch"
class BundleLinkType(AbstractType):
__resource_type__ = "BundleLink"
class CapabilityStatementType(AbstractType):
__resource_type__ = "CapabilityStatement"
class CapabilityStatementDocumentType(AbstractType):
__resource_type__ = "CapabilityStatementDocument"
class CapabilityStatementImplementationType(AbstractType):
__resource_type__ = "CapabilityStatementImplementation"
class CapabilityStatementMessagingType(AbstractType):
__resource_type__ = "CapabilityStatementMessaging"
class CapabilityStatementMessagingEndpointType(AbstractType):
__resource_type__ = "CapabilityStatementMessagingEndpoint"
class CapabilityStatementMessagingSupportedMessageType(AbstractType):
__resource_type__ = "CapabilityStatementMessagingSupportedMessage"
class CapabilityStatementRestType(AbstractType):
__resource_type__ = "CapabilityStatementRest"
class CapabilityStatementRestInteractionType(AbstractType):
__resource_type__ = "CapabilityStatementRestInteraction"
class CapabilityStatementRestResourceType(AbstractType):
__resource_type__ = "CapabilityStatementRestResource"
class CapabilityStatementRestResourceInteractionType(AbstractType):
__resource_type__ = "CapabilityStatementRestResourceInteraction"
class CapabilityStatementRestResourceOperationType(AbstractType):
__resource_type__ = "CapabilityStatementRestResourceOperation"
class CapabilityStatementRestResourceSearchParamType(AbstractType):
__resource_type__ = "CapabilityStatementRestResourceSearchParam"
class CapabilityStatementRestSecurityType(AbstractType):
__resource_type__ = "CapabilityStatementRestSecurity"
class CapabilityStatementSoftwareType(AbstractType):
__resource_type__ = "CapabilityStatementSoftware"
class CarePlanType(AbstractType):
__resource_type__ = "CarePlan"
class CarePlanActivityType(AbstractType):
__resource_type__ = "CarePlanActivity"
class CarePlanActivityDetailType(AbstractType):
__resource_type__ = "CarePlanActivityDetail"
class CareTeamType(AbstractType):
__resource_type__ = "CareTeam"
class CareTeamParticipantType(AbstractType):
__resource_type__ = "CareTeamParticipant"
class CatalogEntryType(AbstractType):
__resource_type__ = "CatalogEntry"
class CatalogEntryRelatedEntryType(AbstractType):
__resource_type__ = "CatalogEntryRelatedEntry"
class ChargeItemType(AbstractType):
__resource_type__ = "ChargeItem"
class ChargeItemDefinitionType(AbstractType):
__resource_type__ = "ChargeItemDefinition"
class ChargeItemDefinitionApplicabilityType(AbstractType):
__resource_type__ = "ChargeItemDefinitionApplicability"
class ChargeItemDefinitionPropertyGroupType(AbstractType):
__resource_type__ = "ChargeItemDefinitionPropertyGroup"
class ChargeItemDefinitionPropertyGroupPriceComponentType(AbstractType):
__resource_type__ = "ChargeItemDefinitionPropertyGroupPriceComponent"
class ChargeItemPerformerType(AbstractType):
__resource_type__ = "ChargeItemPerformer"
class ClaimType(AbstractType):
__resource_type__ = "Claim"
class ClaimAccidentType(AbstractType):
__resource_type__ = "ClaimAccident"
class ClaimCareTeamType(AbstractType):
__resource_type__ = "ClaimCareTeam"
class ClaimDiagnosisType(AbstractType):
__resource_type__ = "ClaimDiagnosis"
class ClaimInsuranceType(AbstractType):
__resource_type__ = "ClaimInsurance"
class ClaimItemType(AbstractType):
__resource_type__ = "ClaimItem"
class ClaimItemDetailType(AbstractType):
__resource_type__ = "ClaimItemDetail"
class ClaimItemDetailSubDetailType(AbstractType):
__resource_type__ = "ClaimItemDetailSubDetail"
class ClaimPayeeType(AbstractType):
__resource_type__ = "ClaimPayee"
class ClaimProcedureType(AbstractType):
__resource_type__ = "ClaimProcedure"
class ClaimRelatedType(AbstractType):
__resource_type__ = "ClaimRelated"
class ClaimResponseType(AbstractType):
__resource_type__ = "ClaimResponse"
class ClaimResponseAddItemType(AbstractType):
__resource_type__ = "ClaimResponseAddItem"
class ClaimResponseAddItemDetailType(AbstractType):
__resource_type__ = "ClaimResponseAddItemDetail"
class ClaimResponseAddItemDetailSubDetailType(AbstractType):
__resource_type__ = "ClaimResponseAddItemDetailSubDetail"
class ClaimResponseErrorType(AbstractType):
__resource_type__ = "ClaimResponseError"
class ClaimResponseInsuranceType(AbstractType):
__resource_type__ = "ClaimResponseInsurance"
class ClaimResponseItemType(AbstractType):
__resource_type__ = "ClaimResponseItem"
class ClaimResponseItemAdjudicationType(AbstractType):
__resource_type__ = "ClaimResponseItemAdjudication"
class ClaimResponseItemDetailType(AbstractType):
__resource_type__ = "ClaimResponseItemDetail"
class ClaimResponseItemDetailSubDetailType(AbstractType):
__resource_type__ = "ClaimResponseItemDetailSubDetail"
class ClaimResponsePaymentType(AbstractType):
__resource_type__ = "ClaimResponsePayment"
class ClaimResponseProcessNoteType(AbstractType):
__resource_type__ = "ClaimResponseProcessNote"
class ClaimResponseTotalType(AbstractType):
__resource_type__ = "ClaimResponseTotal"
class ClaimSupportingInfoType(AbstractType):
__resource_type__ = "ClaimSupportingInfo"
class ClinicalImpressionType(AbstractType):
__resource_type__ = "ClinicalImpression"
class ClinicalImpressionFindingType(AbstractType):
__resource_type__ = "ClinicalImpressionFinding"
class ClinicalImpressionInvestigationType(AbstractType):
__resource_type__ = "ClinicalImpressionInvestigation"
class CodeSystemType(AbstractType):
__resource_type__ = "CodeSystem"
class CodeSystemConceptType(AbstractType):
__resource_type__ = "CodeSystemConcept"
class CodeSystemConceptDesignationType(AbstractType):
__resource_type__ = "CodeSystemConceptDesignation"
class CodeSystemConceptPropertyType(AbstractType):
__resource_type__ = "CodeSystemConceptProperty"
class CodeSystemFilterType(AbstractType):
__resource_type__ = "CodeSystemFilter"
class CodeSystemPropertyType(AbstractType):
__resource_type__ = "CodeSystemProperty"
# Machine-generated FHIR type stubs (CodeableConcept … CoverageClass).
# Each class subclasses AbstractType (defined earlier in this file) and only
# records the FHIR R4 resource/element name it stands for in
# ``__resource_type__``; presumably AbstractType resolves that name to the
# concrete model class lazily — confirm against AbstractType's definition.
# Do not hand-edit: regenerate from the FHIR definitions instead.
class CodeableConceptType(AbstractType):
    __resource_type__ = "CodeableConcept"
class CodingType(AbstractType):
    __resource_type__ = "Coding"
class CommunicationType(AbstractType):
    __resource_type__ = "Communication"
class CommunicationPayloadType(AbstractType):
    __resource_type__ = "CommunicationPayload"
class CommunicationRequestType(AbstractType):
    __resource_type__ = "CommunicationRequest"
class CommunicationRequestPayloadType(AbstractType):
    __resource_type__ = "CommunicationRequestPayload"
class CompartmentDefinitionType(AbstractType):
    __resource_type__ = "CompartmentDefinition"
class CompartmentDefinitionResourceType(AbstractType):
    __resource_type__ = "CompartmentDefinitionResource"
class CompositionType(AbstractType):
    __resource_type__ = "Composition"
class CompositionAttesterType(AbstractType):
    __resource_type__ = "CompositionAttester"
class CompositionEventType(AbstractType):
    __resource_type__ = "CompositionEvent"
class CompositionRelatesToType(AbstractType):
    __resource_type__ = "CompositionRelatesTo"
class CompositionSectionType(AbstractType):
    __resource_type__ = "CompositionSection"
class ConceptMapType(AbstractType):
    __resource_type__ = "ConceptMap"
class ConceptMapGroupType(AbstractType):
    __resource_type__ = "ConceptMapGroup"
class ConceptMapGroupElementType(AbstractType):
    __resource_type__ = "ConceptMapGroupElement"
class ConceptMapGroupElementTargetType(AbstractType):
    __resource_type__ = "ConceptMapGroupElementTarget"
class ConceptMapGroupElementTargetDependsOnType(AbstractType):
    __resource_type__ = "ConceptMapGroupElementTargetDependsOn"
class ConceptMapGroupUnmappedType(AbstractType):
    __resource_type__ = "ConceptMapGroupUnmapped"
class ConditionType(AbstractType):
    __resource_type__ = "Condition"
class ConditionEvidenceType(AbstractType):
    __resource_type__ = "ConditionEvidence"
class ConditionStageType(AbstractType):
    __resource_type__ = "ConditionStage"
class ConsentType(AbstractType):
    __resource_type__ = "Consent"
class ConsentPolicyType(AbstractType):
    __resource_type__ = "ConsentPolicy"
class ConsentProvisionType(AbstractType):
    __resource_type__ = "ConsentProvision"
class ConsentProvisionActorType(AbstractType):
    __resource_type__ = "ConsentProvisionActor"
class ConsentProvisionDataType(AbstractType):
    __resource_type__ = "ConsentProvisionData"
class ConsentVerificationType(AbstractType):
    __resource_type__ = "ConsentVerification"
class ContactDetailType(AbstractType):
    __resource_type__ = "ContactDetail"
class ContactPointType(AbstractType):
    __resource_type__ = "ContactPoint"
class ContractType(AbstractType):
    __resource_type__ = "Contract"
class ContractContentDefinitionType(AbstractType):
    __resource_type__ = "ContractContentDefinition"
class ContractFriendlyType(AbstractType):
    __resource_type__ = "ContractFriendly"
class ContractLegalType(AbstractType):
    __resource_type__ = "ContractLegal"
class ContractRuleType(AbstractType):
    __resource_type__ = "ContractRule"
class ContractSignerType(AbstractType):
    __resource_type__ = "ContractSigner"
class ContractTermType(AbstractType):
    __resource_type__ = "ContractTerm"
class ContractTermActionType(AbstractType):
    __resource_type__ = "ContractTermAction"
class ContractTermActionSubjectType(AbstractType):
    __resource_type__ = "ContractTermActionSubject"
class ContractTermAssetType(AbstractType):
    __resource_type__ = "ContractTermAsset"
class ContractTermAssetContextType(AbstractType):
    __resource_type__ = "ContractTermAssetContext"
class ContractTermAssetValuedItemType(AbstractType):
    __resource_type__ = "ContractTermAssetValuedItem"
class ContractTermOfferType(AbstractType):
    __resource_type__ = "ContractTermOffer"
class ContractTermOfferAnswerType(AbstractType):
    __resource_type__ = "ContractTermOfferAnswer"
class ContractTermOfferPartyType(AbstractType):
    __resource_type__ = "ContractTermOfferParty"
class ContractTermSecurityLabelType(AbstractType):
    __resource_type__ = "ContractTermSecurityLabel"
class ContributorType(AbstractType):
    __resource_type__ = "Contributor"
class CountType(AbstractType):
    __resource_type__ = "Count"
class CoverageType(AbstractType):
    __resource_type__ = "Coverage"
class CoverageClassType(AbstractType):
    __resource_type__ = "CoverageClass"
# Machine-generated FHIR type stubs (CoverageCostToBeneficiary … Duration).
# Each class only names its FHIR R4 resource/element in ``__resource_type__``;
# all behavior comes from AbstractType, defined earlier in this file.
# Do not hand-edit: regenerate from the FHIR definitions instead.
class CoverageCostToBeneficiaryType(AbstractType):
    __resource_type__ = "CoverageCostToBeneficiary"
class CoverageCostToBeneficiaryExceptionType(AbstractType):
    __resource_type__ = "CoverageCostToBeneficiaryException"
class CoverageEligibilityRequestType(AbstractType):
    __resource_type__ = "CoverageEligibilityRequest"
class CoverageEligibilityRequestInsuranceType(AbstractType):
    __resource_type__ = "CoverageEligibilityRequestInsurance"
class CoverageEligibilityRequestItemType(AbstractType):
    __resource_type__ = "CoverageEligibilityRequestItem"
class CoverageEligibilityRequestItemDiagnosisType(AbstractType):
    __resource_type__ = "CoverageEligibilityRequestItemDiagnosis"
class CoverageEligibilityRequestSupportingInfoType(AbstractType):
    __resource_type__ = "CoverageEligibilityRequestSupportingInfo"
class CoverageEligibilityResponseType(AbstractType):
    __resource_type__ = "CoverageEligibilityResponse"
class CoverageEligibilityResponseErrorType(AbstractType):
    __resource_type__ = "CoverageEligibilityResponseError"
class CoverageEligibilityResponseInsuranceType(AbstractType):
    __resource_type__ = "CoverageEligibilityResponseInsurance"
class CoverageEligibilityResponseInsuranceItemType(AbstractType):
    __resource_type__ = "CoverageEligibilityResponseInsuranceItem"
class CoverageEligibilityResponseInsuranceItemBenefitType(AbstractType):
    __resource_type__ = "CoverageEligibilityResponseInsuranceItemBenefit"
class DataRequirementType(AbstractType):
    __resource_type__ = "DataRequirement"
class DataRequirementCodeFilterType(AbstractType):
    __resource_type__ = "DataRequirementCodeFilter"
class DataRequirementDateFilterType(AbstractType):
    __resource_type__ = "DataRequirementDateFilter"
class DataRequirementSortType(AbstractType):
    __resource_type__ = "DataRequirementSort"
class DetectedIssueType(AbstractType):
    __resource_type__ = "DetectedIssue"
class DetectedIssueEvidenceType(AbstractType):
    __resource_type__ = "DetectedIssueEvidence"
class DetectedIssueMitigationType(AbstractType):
    __resource_type__ = "DetectedIssueMitigation"
class DeviceType(AbstractType):
    __resource_type__ = "Device"
class DeviceDefinitionType(AbstractType):
    __resource_type__ = "DeviceDefinition"
class DeviceDefinitionCapabilityType(AbstractType):
    __resource_type__ = "DeviceDefinitionCapability"
class DeviceDefinitionDeviceNameType(AbstractType):
    __resource_type__ = "DeviceDefinitionDeviceName"
class DeviceDefinitionMaterialType(AbstractType):
    __resource_type__ = "DeviceDefinitionMaterial"
class DeviceDefinitionPropertyType(AbstractType):
    __resource_type__ = "DeviceDefinitionProperty"
class DeviceDefinitionSpecializationType(AbstractType):
    __resource_type__ = "DeviceDefinitionSpecialization"
class DeviceDefinitionUdiDeviceIdentifierType(AbstractType):
    __resource_type__ = "DeviceDefinitionUdiDeviceIdentifier"
class DeviceDeviceNameType(AbstractType):
    __resource_type__ = "DeviceDeviceName"
class DeviceMetricType(AbstractType):
    __resource_type__ = "DeviceMetric"
class DeviceMetricCalibrationType(AbstractType):
    __resource_type__ = "DeviceMetricCalibration"
class DevicePropertyType(AbstractType):
    __resource_type__ = "DeviceProperty"
class DeviceRequestType(AbstractType):
    __resource_type__ = "DeviceRequest"
class DeviceRequestParameterType(AbstractType):
    __resource_type__ = "DeviceRequestParameter"
class DeviceSpecializationType(AbstractType):
    __resource_type__ = "DeviceSpecialization"
class DeviceUdiCarrierType(AbstractType):
    __resource_type__ = "DeviceUdiCarrier"
class DeviceUseStatementType(AbstractType):
    __resource_type__ = "DeviceUseStatement"
class DeviceVersionType(AbstractType):
    __resource_type__ = "DeviceVersion"
class DiagnosticReportType(AbstractType):
    __resource_type__ = "DiagnosticReport"
class DiagnosticReportMediaType(AbstractType):
    __resource_type__ = "DiagnosticReportMedia"
class DistanceType(AbstractType):
    __resource_type__ = "Distance"
class DocumentManifestType(AbstractType):
    __resource_type__ = "DocumentManifest"
class DocumentManifestRelatedType(AbstractType):
    __resource_type__ = "DocumentManifestRelated"
class DocumentReferenceType(AbstractType):
    __resource_type__ = "DocumentReference"
class DocumentReferenceContentType(AbstractType):
    __resource_type__ = "DocumentReferenceContent"
class DocumentReferenceContextType(AbstractType):
    __resource_type__ = "DocumentReferenceContext"
class DocumentReferenceRelatesToType(AbstractType):
    __resource_type__ = "DocumentReferenceRelatesTo"
class DomainResourceType(AbstractType):
    __resource_type__ = "DomainResource"
class DosageType(AbstractType):
    __resource_type__ = "Dosage"
class DosageDoseAndRateType(AbstractType):
    __resource_type__ = "DosageDoseAndRate"
class DurationType(AbstractType):
    __resource_type__ = "Duration"
# Machine-generated FHIR type stubs (EffectEvidenceSynthesis …
# ExplanationOfBenefitCareTeam). Each class only names its FHIR R4
# resource/element in ``__resource_type__``; all behavior comes from
# AbstractType, defined earlier in this file.
# Do not hand-edit: regenerate from the FHIR definitions instead.
class EffectEvidenceSynthesisType(AbstractType):
    __resource_type__ = "EffectEvidenceSynthesis"
class EffectEvidenceSynthesisCertaintyType(AbstractType):
    __resource_type__ = "EffectEvidenceSynthesisCertainty"
class EffectEvidenceSynthesisCertaintyCertaintySubcomponentType(AbstractType):
    __resource_type__ = "EffectEvidenceSynthesisCertaintyCertaintySubcomponent"
class EffectEvidenceSynthesisEffectEstimateType(AbstractType):
    __resource_type__ = "EffectEvidenceSynthesisEffectEstimate"
class EffectEvidenceSynthesisEffectEstimatePrecisionEstimateType(AbstractType):
    __resource_type__ = "EffectEvidenceSynthesisEffectEstimatePrecisionEstimate"
class EffectEvidenceSynthesisResultsByExposureType(AbstractType):
    __resource_type__ = "EffectEvidenceSynthesisResultsByExposure"
class EffectEvidenceSynthesisSampleSizeType(AbstractType):
    __resource_type__ = "EffectEvidenceSynthesisSampleSize"
class ElementDefinitionType(AbstractType):
    __resource_type__ = "ElementDefinition"
class ElementDefinitionBaseType(AbstractType):
    __resource_type__ = "ElementDefinitionBase"
class ElementDefinitionBindingType(AbstractType):
    __resource_type__ = "ElementDefinitionBinding"
class ElementDefinitionConstraintType(AbstractType):
    __resource_type__ = "ElementDefinitionConstraint"
class ElementDefinitionExampleType(AbstractType):
    __resource_type__ = "ElementDefinitionExample"
class ElementDefinitionMappingType(AbstractType):
    __resource_type__ = "ElementDefinitionMapping"
class ElementDefinitionSlicingType(AbstractType):
    __resource_type__ = "ElementDefinitionSlicing"
class ElementDefinitionSlicingDiscriminatorType(AbstractType):
    __resource_type__ = "ElementDefinitionSlicingDiscriminator"
class ElementDefinitionTypeType(AbstractType):
    __resource_type__ = "ElementDefinitionType"
class EncounterType(AbstractType):
    __resource_type__ = "Encounter"
class EncounterClassHistoryType(AbstractType):
    __resource_type__ = "EncounterClassHistory"
class EncounterDiagnosisType(AbstractType):
    __resource_type__ = "EncounterDiagnosis"
class EncounterHospitalizationType(AbstractType):
    __resource_type__ = "EncounterHospitalization"
class EncounterLocationType(AbstractType):
    __resource_type__ = "EncounterLocation"
class EncounterParticipantType(AbstractType):
    __resource_type__ = "EncounterParticipant"
class EncounterStatusHistoryType(AbstractType):
    __resource_type__ = "EncounterStatusHistory"
class EndpointType(AbstractType):
    __resource_type__ = "Endpoint"
class EnrollmentRequestType(AbstractType):
    __resource_type__ = "EnrollmentRequest"
class EnrollmentResponseType(AbstractType):
    __resource_type__ = "EnrollmentResponse"
class EpisodeOfCareType(AbstractType):
    __resource_type__ = "EpisodeOfCare"
class EpisodeOfCareDiagnosisType(AbstractType):
    __resource_type__ = "EpisodeOfCareDiagnosis"
class EpisodeOfCareStatusHistoryType(AbstractType):
    __resource_type__ = "EpisodeOfCareStatusHistory"
class EventDefinitionType(AbstractType):
    __resource_type__ = "EventDefinition"
class EvidenceType(AbstractType):
    __resource_type__ = "Evidence"
class EvidenceVariableType(AbstractType):
    __resource_type__ = "EvidenceVariable"
class EvidenceVariableCharacteristicType(AbstractType):
    __resource_type__ = "EvidenceVariableCharacteristic"
class ExampleScenarioType(AbstractType):
    __resource_type__ = "ExampleScenario"
class ExampleScenarioActorType(AbstractType):
    __resource_type__ = "ExampleScenarioActor"
class ExampleScenarioInstanceType(AbstractType):
    __resource_type__ = "ExampleScenarioInstance"
class ExampleScenarioInstanceContainedInstanceType(AbstractType):
    __resource_type__ = "ExampleScenarioInstanceContainedInstance"
class ExampleScenarioInstanceVersionType(AbstractType):
    __resource_type__ = "ExampleScenarioInstanceVersion"
class ExampleScenarioProcessType(AbstractType):
    __resource_type__ = "ExampleScenarioProcess"
class ExampleScenarioProcessStepType(AbstractType):
    __resource_type__ = "ExampleScenarioProcessStep"
class ExampleScenarioProcessStepAlternativeType(AbstractType):
    __resource_type__ = "ExampleScenarioProcessStepAlternative"
class ExampleScenarioProcessStepOperationType(AbstractType):
    __resource_type__ = "ExampleScenarioProcessStepOperation"
class ExplanationOfBenefitType(AbstractType):
    __resource_type__ = "ExplanationOfBenefit"
class ExplanationOfBenefitAccidentType(AbstractType):
    __resource_type__ = "ExplanationOfBenefitAccident"
class ExplanationOfBenefitAddItemType(AbstractType):
    __resource_type__ = "ExplanationOfBenefitAddItem"
class ExplanationOfBenefitAddItemDetailType(AbstractType):
    __resource_type__ = "ExplanationOfBenefitAddItemDetail"
class ExplanationOfBenefitAddItemDetailSubDetailType(AbstractType):
    __resource_type__ = "ExplanationOfBenefitAddItemDetailSubDetail"
class ExplanationOfBenefitBenefitBalanceType(AbstractType):
    __resource_type__ = "ExplanationOfBenefitBenefitBalance"
class ExplanationOfBenefitBenefitBalanceFinancialType(AbstractType):
    __resource_type__ = "ExplanationOfBenefitBenefitBalanceFinancial"
class ExplanationOfBenefitCareTeamType(AbstractType):
    __resource_type__ = "ExplanationOfBenefitCareTeam"
# Machine-generated FHIR type stubs (ExplanationOfBenefitDiagnosis …
# ImplementationGuideDefinitionGrouping). Each class only names its FHIR R4
# resource/element in ``__resource_type__``; all behavior comes from
# AbstractType, defined earlier in this file.
# Do not hand-edit: regenerate from the FHIR definitions instead.
class ExplanationOfBenefitDiagnosisType(AbstractType):
    __resource_type__ = "ExplanationOfBenefitDiagnosis"
class ExplanationOfBenefitInsuranceType(AbstractType):
    __resource_type__ = "ExplanationOfBenefitInsurance"
class ExplanationOfBenefitItemType(AbstractType):
    __resource_type__ = "ExplanationOfBenefitItem"
class ExplanationOfBenefitItemAdjudicationType(AbstractType):
    __resource_type__ = "ExplanationOfBenefitItemAdjudication"
class ExplanationOfBenefitItemDetailType(AbstractType):
    __resource_type__ = "ExplanationOfBenefitItemDetail"
class ExplanationOfBenefitItemDetailSubDetailType(AbstractType):
    __resource_type__ = "ExplanationOfBenefitItemDetailSubDetail"
class ExplanationOfBenefitPayeeType(AbstractType):
    __resource_type__ = "ExplanationOfBenefitPayee"
class ExplanationOfBenefitPaymentType(AbstractType):
    __resource_type__ = "ExplanationOfBenefitPayment"
class ExplanationOfBenefitProcedureType(AbstractType):
    __resource_type__ = "ExplanationOfBenefitProcedure"
class ExplanationOfBenefitProcessNoteType(AbstractType):
    __resource_type__ = "ExplanationOfBenefitProcessNote"
class ExplanationOfBenefitRelatedType(AbstractType):
    __resource_type__ = "ExplanationOfBenefitRelated"
class ExplanationOfBenefitSupportingInfoType(AbstractType):
    __resource_type__ = "ExplanationOfBenefitSupportingInfo"
class ExplanationOfBenefitTotalType(AbstractType):
    __resource_type__ = "ExplanationOfBenefitTotal"
class ExpressionType(AbstractType):
    __resource_type__ = "Expression"
class ExtensionType(AbstractType):
    __resource_type__ = "Extension"
class FamilyMemberHistoryType(AbstractType):
    __resource_type__ = "FamilyMemberHistory"
class FamilyMemberHistoryConditionType(AbstractType):
    __resource_type__ = "FamilyMemberHistoryCondition"
class FlagType(AbstractType):
    __resource_type__ = "Flag"
class GoalType(AbstractType):
    __resource_type__ = "Goal"
class GoalTargetType(AbstractType):
    __resource_type__ = "GoalTarget"
class GraphDefinitionType(AbstractType):
    __resource_type__ = "GraphDefinition"
class GraphDefinitionLinkType(AbstractType):
    __resource_type__ = "GraphDefinitionLink"
class GraphDefinitionLinkTargetType(AbstractType):
    __resource_type__ = "GraphDefinitionLinkTarget"
class GraphDefinitionLinkTargetCompartmentType(AbstractType):
    __resource_type__ = "GraphDefinitionLinkTargetCompartment"
class GroupType(AbstractType):
    __resource_type__ = "Group"
class GroupCharacteristicType(AbstractType):
    __resource_type__ = "GroupCharacteristic"
class GroupMemberType(AbstractType):
    __resource_type__ = "GroupMember"
class GuidanceResponseType(AbstractType):
    __resource_type__ = "GuidanceResponse"
class HealthcareServiceType(AbstractType):
    __resource_type__ = "HealthcareService"
class HealthcareServiceAvailableTimeType(AbstractType):
    __resource_type__ = "HealthcareServiceAvailableTime"
class HealthcareServiceEligibilityType(AbstractType):
    __resource_type__ = "HealthcareServiceEligibility"
class HealthcareServiceNotAvailableType(AbstractType):
    __resource_type__ = "HealthcareServiceNotAvailable"
class HumanNameType(AbstractType):
    __resource_type__ = "HumanName"
class IdentifierType(AbstractType):
    __resource_type__ = "Identifier"
class ImagingStudyType(AbstractType):
    __resource_type__ = "ImagingStudy"
class ImagingStudySeriesType(AbstractType):
    __resource_type__ = "ImagingStudySeries"
class ImagingStudySeriesInstanceType(AbstractType):
    __resource_type__ = "ImagingStudySeriesInstance"
class ImagingStudySeriesPerformerType(AbstractType):
    __resource_type__ = "ImagingStudySeriesPerformer"
class ImmunizationType(AbstractType):
    __resource_type__ = "Immunization"
class ImmunizationEducationType(AbstractType):
    __resource_type__ = "ImmunizationEducation"
class ImmunizationEvaluationType(AbstractType):
    __resource_type__ = "ImmunizationEvaluation"
class ImmunizationPerformerType(AbstractType):
    __resource_type__ = "ImmunizationPerformer"
class ImmunizationProtocolAppliedType(AbstractType):
    __resource_type__ = "ImmunizationProtocolApplied"
class ImmunizationReactionType(AbstractType):
    __resource_type__ = "ImmunizationReaction"
class ImmunizationRecommendationType(AbstractType):
    __resource_type__ = "ImmunizationRecommendation"
class ImmunizationRecommendationRecommendationType(AbstractType):
    __resource_type__ = "ImmunizationRecommendationRecommendation"
class ImmunizationRecommendationRecommendationDateCriterionType(AbstractType):
    __resource_type__ = "ImmunizationRecommendationRecommendationDateCriterion"
class ImplementationGuideType(AbstractType):
    __resource_type__ = "ImplementationGuide"
class ImplementationGuideDefinitionType(AbstractType):
    __resource_type__ = "ImplementationGuideDefinition"
class ImplementationGuideDefinitionGroupingType(AbstractType):
    __resource_type__ = "ImplementationGuideDefinitionGrouping"
# Machine-generated FHIR type stubs (ImplementationGuideDefinitionPage …
# MedicationAdministrationPerformer). Each class only names its FHIR R4
# resource/element in ``__resource_type__``; all behavior comes from
# AbstractType, defined earlier in this file.
# Do not hand-edit: regenerate from the FHIR definitions instead.
class ImplementationGuideDefinitionPageType(AbstractType):
    __resource_type__ = "ImplementationGuideDefinitionPage"
class ImplementationGuideDefinitionParameterType(AbstractType):
    __resource_type__ = "ImplementationGuideDefinitionParameter"
class ImplementationGuideDefinitionResourceType(AbstractType):
    __resource_type__ = "ImplementationGuideDefinitionResource"
class ImplementationGuideDefinitionTemplateType(AbstractType):
    __resource_type__ = "ImplementationGuideDefinitionTemplate"
class ImplementationGuideDependsOnType(AbstractType):
    __resource_type__ = "ImplementationGuideDependsOn"
class ImplementationGuideGlobalType(AbstractType):
    __resource_type__ = "ImplementationGuideGlobal"
class ImplementationGuideManifestType(AbstractType):
    __resource_type__ = "ImplementationGuideManifest"
class ImplementationGuideManifestPageType(AbstractType):
    __resource_type__ = "ImplementationGuideManifestPage"
class ImplementationGuideManifestResourceType(AbstractType):
    __resource_type__ = "ImplementationGuideManifestResource"
class InsurancePlanType(AbstractType):
    __resource_type__ = "InsurancePlan"
class InsurancePlanContactType(AbstractType):
    __resource_type__ = "InsurancePlanContact"
class InsurancePlanCoverageType(AbstractType):
    __resource_type__ = "InsurancePlanCoverage"
class InsurancePlanCoverageBenefitType(AbstractType):
    __resource_type__ = "InsurancePlanCoverageBenefit"
class InsurancePlanCoverageBenefitLimitType(AbstractType):
    __resource_type__ = "InsurancePlanCoverageBenefitLimit"
class InsurancePlanPlanType(AbstractType):
    __resource_type__ = "InsurancePlanPlan"
class InsurancePlanPlanGeneralCostType(AbstractType):
    __resource_type__ = "InsurancePlanPlanGeneralCost"
class InsurancePlanPlanSpecificCostType(AbstractType):
    __resource_type__ = "InsurancePlanPlanSpecificCost"
class InsurancePlanPlanSpecificCostBenefitType(AbstractType):
    __resource_type__ = "InsurancePlanPlanSpecificCostBenefit"
class InsurancePlanPlanSpecificCostBenefitCostType(AbstractType):
    __resource_type__ = "InsurancePlanPlanSpecificCostBenefitCost"
class InvoiceType(AbstractType):
    __resource_type__ = "Invoice"
class InvoiceLineItemType(AbstractType):
    __resource_type__ = "InvoiceLineItem"
class InvoiceLineItemPriceComponentType(AbstractType):
    __resource_type__ = "InvoiceLineItemPriceComponent"
class InvoiceParticipantType(AbstractType):
    __resource_type__ = "InvoiceParticipant"
class LibraryType(AbstractType):
    __resource_type__ = "Library"
class LinkageType(AbstractType):
    __resource_type__ = "Linkage"
class LinkageItemType(AbstractType):
    __resource_type__ = "LinkageItem"
class ListType(AbstractType):
    __resource_type__ = "List"
class ListEntryType(AbstractType):
    __resource_type__ = "ListEntry"
class LocationType(AbstractType):
    __resource_type__ = "Location"
class LocationHoursOfOperationType(AbstractType):
    __resource_type__ = "LocationHoursOfOperation"
class LocationPositionType(AbstractType):
    __resource_type__ = "LocationPosition"
class MarketingStatusType(AbstractType):
    __resource_type__ = "MarketingStatus"
class MeasureType(AbstractType):
    __resource_type__ = "Measure"
class MeasureGroupType(AbstractType):
    __resource_type__ = "MeasureGroup"
class MeasureGroupPopulationType(AbstractType):
    __resource_type__ = "MeasureGroupPopulation"
class MeasureGroupStratifierType(AbstractType):
    __resource_type__ = "MeasureGroupStratifier"
class MeasureGroupStratifierComponentType(AbstractType):
    __resource_type__ = "MeasureGroupStratifierComponent"
class MeasureReportType(AbstractType):
    __resource_type__ = "MeasureReport"
class MeasureReportGroupType(AbstractType):
    __resource_type__ = "MeasureReportGroup"
class MeasureReportGroupPopulationType(AbstractType):
    __resource_type__ = "MeasureReportGroupPopulation"
class MeasureReportGroupStratifierType(AbstractType):
    __resource_type__ = "MeasureReportGroupStratifier"
class MeasureReportGroupStratifierStratumType(AbstractType):
    __resource_type__ = "MeasureReportGroupStratifierStratum"
class MeasureReportGroupStratifierStratumComponentType(AbstractType):
    __resource_type__ = "MeasureReportGroupStratifierStratumComponent"
class MeasureReportGroupStratifierStratumPopulationType(AbstractType):
    __resource_type__ = "MeasureReportGroupStratifierStratumPopulation"
class MeasureSupplementalDataType(AbstractType):
    __resource_type__ = "MeasureSupplementalData"
class MediaType(AbstractType):
    __resource_type__ = "Media"
class MedicationType(AbstractType):
    __resource_type__ = "Medication"
class MedicationAdministrationType(AbstractType):
    __resource_type__ = "MedicationAdministration"
class MedicationAdministrationDosageType(AbstractType):
    __resource_type__ = "MedicationAdministrationDosage"
class MedicationAdministrationPerformerType(AbstractType):
    __resource_type__ = "MedicationAdministrationPerformer"
# Machine-generated FHIR type stubs (MedicationBatch …
# MedicinalProductNameNamePart). Each class only names its FHIR R4
# resource/element in ``__resource_type__``; all behavior comes from
# AbstractType, defined earlier in this file. Over-long class headers and
# string values are wrapped across lines (black formatting) but are
# otherwise identical in shape to the two-line stubs.
# Do not hand-edit: regenerate from the FHIR definitions instead.
class MedicationBatchType(AbstractType):
    __resource_type__ = "MedicationBatch"
class MedicationDispenseType(AbstractType):
    __resource_type__ = "MedicationDispense"
class MedicationDispensePerformerType(AbstractType):
    __resource_type__ = "MedicationDispensePerformer"
class MedicationDispenseSubstitutionType(AbstractType):
    __resource_type__ = "MedicationDispenseSubstitution"
class MedicationIngredientType(AbstractType):
    __resource_type__ = "MedicationIngredient"
class MedicationKnowledgeType(AbstractType):
    __resource_type__ = "MedicationKnowledge"
class MedicationKnowledgeAdministrationGuidelinesType(AbstractType):
    __resource_type__ = "MedicationKnowledgeAdministrationGuidelines"
class MedicationKnowledgeAdministrationGuidelinesDosageType(AbstractType):
    __resource_type__ = "MedicationKnowledgeAdministrationGuidelinesDosage"
class MedicationKnowledgeAdministrationGuidelinesPatientCharacteristicsType(
    AbstractType
):
    __resource_type__ = (
        "MedicationKnowledgeAdministrationGuidelinesPatientCharacteristics"
    )
class MedicationKnowledgeCostType(AbstractType):
    __resource_type__ = "MedicationKnowledgeCost"
class MedicationKnowledgeDrugCharacteristicType(AbstractType):
    __resource_type__ = "MedicationKnowledgeDrugCharacteristic"
class MedicationKnowledgeIngredientType(AbstractType):
    __resource_type__ = "MedicationKnowledgeIngredient"
class MedicationKnowledgeKineticsType(AbstractType):
    __resource_type__ = "MedicationKnowledgeKinetics"
class MedicationKnowledgeMedicineClassificationType(AbstractType):
    __resource_type__ = "MedicationKnowledgeMedicineClassification"
class MedicationKnowledgeMonitoringProgramType(AbstractType):
    __resource_type__ = "MedicationKnowledgeMonitoringProgram"
class MedicationKnowledgeMonographType(AbstractType):
    __resource_type__ = "MedicationKnowledgeMonograph"
class MedicationKnowledgePackagingType(AbstractType):
    __resource_type__ = "MedicationKnowledgePackaging"
class MedicationKnowledgeRegulatoryType(AbstractType):
    __resource_type__ = "MedicationKnowledgeRegulatory"
class MedicationKnowledgeRegulatoryMaxDispenseType(AbstractType):
    __resource_type__ = "MedicationKnowledgeRegulatoryMaxDispense"
class MedicationKnowledgeRegulatoryScheduleType(AbstractType):
    __resource_type__ = "MedicationKnowledgeRegulatorySchedule"
class MedicationKnowledgeRegulatorySubstitutionType(AbstractType):
    __resource_type__ = "MedicationKnowledgeRegulatorySubstitution"
class MedicationKnowledgeRelatedMedicationKnowledgeType(AbstractType):
    __resource_type__ = "MedicationKnowledgeRelatedMedicationKnowledge"
class MedicationRequestType(AbstractType):
    __resource_type__ = "MedicationRequest"
class MedicationRequestDispenseRequestType(AbstractType):
    __resource_type__ = "MedicationRequestDispenseRequest"
class MedicationRequestDispenseRequestInitialFillType(AbstractType):
    __resource_type__ = "MedicationRequestDispenseRequestInitialFill"
class MedicationRequestSubstitutionType(AbstractType):
    __resource_type__ = "MedicationRequestSubstitution"
class MedicationStatementType(AbstractType):
    __resource_type__ = "MedicationStatement"
class MedicinalProductType(AbstractType):
    __resource_type__ = "MedicinalProduct"
class MedicinalProductAuthorizationType(AbstractType):
    __resource_type__ = "MedicinalProductAuthorization"
class MedicinalProductAuthorizationJurisdictionalAuthorizationType(AbstractType):
    __resource_type__ = "MedicinalProductAuthorizationJurisdictionalAuthorization"
class MedicinalProductAuthorizationProcedureType(AbstractType):
    __resource_type__ = "MedicinalProductAuthorizationProcedure"
class MedicinalProductContraindicationType(AbstractType):
    __resource_type__ = "MedicinalProductContraindication"
class MedicinalProductContraindicationOtherTherapyType(AbstractType):
    __resource_type__ = "MedicinalProductContraindicationOtherTherapy"
class MedicinalProductIndicationType(AbstractType):
    __resource_type__ = "MedicinalProductIndication"
class MedicinalProductIndicationOtherTherapyType(AbstractType):
    __resource_type__ = "MedicinalProductIndicationOtherTherapy"
class MedicinalProductIngredientType(AbstractType):
    __resource_type__ = "MedicinalProductIngredient"
class MedicinalProductIngredientSpecifiedSubstanceType(AbstractType):
    __resource_type__ = "MedicinalProductIngredientSpecifiedSubstance"
class MedicinalProductIngredientSpecifiedSubstanceStrengthType(AbstractType):
    __resource_type__ = "MedicinalProductIngredientSpecifiedSubstanceStrength"
class MedicinalProductIngredientSpecifiedSubstanceStrengthReferenceStrengthType(
    AbstractType
):
    __resource_type__ = (
        "MedicinalProductIngredientSpecifiedSubstanceStrengthReferenceStrength"
    )
class MedicinalProductIngredientSubstanceType(AbstractType):
    __resource_type__ = "MedicinalProductIngredientSubstance"
class MedicinalProductInteractionType(AbstractType):
    __resource_type__ = "MedicinalProductInteraction"
class MedicinalProductInteractionInteractantType(AbstractType):
    __resource_type__ = "MedicinalProductInteractionInteractant"
class MedicinalProductManufacturedType(AbstractType):
    __resource_type__ = "MedicinalProductManufactured"
class MedicinalProductManufacturingBusinessOperationType(AbstractType):
    __resource_type__ = "MedicinalProductManufacturingBusinessOperation"
class MedicinalProductNameType(AbstractType):
    __resource_type__ = "MedicinalProductName"
class MedicinalProductNameCountryLanguageType(AbstractType):
    __resource_type__ = "MedicinalProductNameCountryLanguage"
class MedicinalProductNameNamePartType(AbstractType):
    __resource_type__ = "MedicinalProductNameNamePart"
# Machine-generated FHIR type stubs (MedicinalProductPackaged …
# OperationDefinitionOverload). Each class only names its FHIR R4
# resource/element in ``__resource_type__``; all behavior comes from
# AbstractType, defined earlier in this file. The longest entry keeps its
# value on one over-long line with a ``# noqa:B950`` marker, as generated.
# Do not hand-edit: regenerate from the FHIR definitions instead.
class MedicinalProductPackagedType(AbstractType):
    __resource_type__ = "MedicinalProductPackaged"
class MedicinalProductPackagedBatchIdentifierType(AbstractType):
    __resource_type__ = "MedicinalProductPackagedBatchIdentifier"
class MedicinalProductPackagedPackageItemType(AbstractType):
    __resource_type__ = "MedicinalProductPackagedPackageItem"
class MedicinalProductPharmaceuticalType(AbstractType):
    __resource_type__ = "MedicinalProductPharmaceutical"
class MedicinalProductPharmaceuticalCharacteristicsType(AbstractType):
    __resource_type__ = "MedicinalProductPharmaceuticalCharacteristics"
class MedicinalProductPharmaceuticalRouteOfAdministrationType(AbstractType):
    __resource_type__ = "MedicinalProductPharmaceuticalRouteOfAdministration"
class MedicinalProductPharmaceuticalRouteOfAdministrationTargetSpeciesType(
    AbstractType
):
    __resource_type__ = (
        "MedicinalProductPharmaceuticalRouteOfAdministrationTargetSpecies"
    )
class MedicinalProductPharmaceuticalRouteOfAdministrationTargetSpeciesWithdrawalPeriodType(
    AbstractType
):
    __resource_type__ = "MedicinalProductPharmaceuticalRouteOfAdministrationTargetSpeciesWithdrawalPeriod"  # noqa:B950
class MedicinalProductSpecialDesignationType(AbstractType):
    __resource_type__ = "MedicinalProductSpecialDesignation"
class MedicinalProductUndesirableEffectType(AbstractType):
    __resource_type__ = "MedicinalProductUndesirableEffect"
class MessageDefinitionType(AbstractType):
    __resource_type__ = "MessageDefinition"
class MessageDefinitionAllowedResponseType(AbstractType):
    __resource_type__ = "MessageDefinitionAllowedResponse"
class MessageDefinitionFocusType(AbstractType):
    __resource_type__ = "MessageDefinitionFocus"
class MessageHeaderType(AbstractType):
    __resource_type__ = "MessageHeader"
class MessageHeaderDestinationType(AbstractType):
    __resource_type__ = "MessageHeaderDestination"
class MessageHeaderResponseType(AbstractType):
    __resource_type__ = "MessageHeaderResponse"
class MessageHeaderSourceType(AbstractType):
    __resource_type__ = "MessageHeaderSource"
class MetaType(AbstractType):
    __resource_type__ = "Meta"
class MetadataResourceType(AbstractType):
    __resource_type__ = "MetadataResource"
class MolecularSequenceType(AbstractType):
    __resource_type__ = "MolecularSequence"
class MolecularSequenceQualityType(AbstractType):
    __resource_type__ = "MolecularSequenceQuality"
class MolecularSequenceQualityRocType(AbstractType):
    __resource_type__ = "MolecularSequenceQualityRoc"
class MolecularSequenceReferenceSeqType(AbstractType):
    __resource_type__ = "MolecularSequenceReferenceSeq"
class MolecularSequenceRepositoryType(AbstractType):
    __resource_type__ = "MolecularSequenceRepository"
class MolecularSequenceStructureVariantType(AbstractType):
    __resource_type__ = "MolecularSequenceStructureVariant"
class MolecularSequenceStructureVariantInnerType(AbstractType):
    __resource_type__ = "MolecularSequenceStructureVariantInner"
class MolecularSequenceStructureVariantOuterType(AbstractType):
    __resource_type__ = "MolecularSequenceStructureVariantOuter"
class MolecularSequenceVariantType(AbstractType):
    __resource_type__ = "MolecularSequenceVariant"
class MoneyType(AbstractType):
    __resource_type__ = "Money"
class NamingSystemType(AbstractType):
    __resource_type__ = "NamingSystem"
class NamingSystemUniqueIdType(AbstractType):
    __resource_type__ = "NamingSystemUniqueId"
class NarrativeType(AbstractType):
    __resource_type__ = "Narrative"
class NutritionOrderType(AbstractType):
    __resource_type__ = "NutritionOrder"
class NutritionOrderEnteralFormulaType(AbstractType):
    __resource_type__ = "NutritionOrderEnteralFormula"
class NutritionOrderEnteralFormulaAdministrationType(AbstractType):
    __resource_type__ = "NutritionOrderEnteralFormulaAdministration"
class NutritionOrderOralDietType(AbstractType):
    __resource_type__ = "NutritionOrderOralDiet"
class NutritionOrderOralDietNutrientType(AbstractType):
    __resource_type__ = "NutritionOrderOralDietNutrient"
class NutritionOrderOralDietTextureType(AbstractType):
    __resource_type__ = "NutritionOrderOralDietTexture"
class NutritionOrderSupplementType(AbstractType):
    __resource_type__ = "NutritionOrderSupplement"
class ObservationType(AbstractType):
    __resource_type__ = "Observation"
class ObservationComponentType(AbstractType):
    __resource_type__ = "ObservationComponent"
class ObservationDefinitionType(AbstractType):
    __resource_type__ = "ObservationDefinition"
class ObservationDefinitionQualifiedIntervalType(AbstractType):
    __resource_type__ = "ObservationDefinitionQualifiedInterval"
class ObservationDefinitionQuantitativeDetailsType(AbstractType):
    __resource_type__ = "ObservationDefinitionQuantitativeDetails"
class ObservationReferenceRangeType(AbstractType):
    __resource_type__ = "ObservationReferenceRange"
class OperationDefinitionType(AbstractType):
    __resource_type__ = "OperationDefinition"
class OperationDefinitionOverloadType(AbstractType):
    __resource_type__ = "OperationDefinitionOverload"
# Machine-generated FHIR type stubs (OperationDefinitionParameter …
# PlanDefinitionAction). Each class only names its FHIR R4 resource/element
# in ``__resource_type__``; all behavior comes from AbstractType, defined
# earlier in this file.
# Do not hand-edit: regenerate from the FHIR definitions instead.
class OperationDefinitionParameterType(AbstractType):
    __resource_type__ = "OperationDefinitionParameter"
class OperationDefinitionParameterBindingType(AbstractType):
    __resource_type__ = "OperationDefinitionParameterBinding"
class OperationDefinitionParameterReferencedFromType(AbstractType):
    __resource_type__ = "OperationDefinitionParameterReferencedFrom"
class OperationOutcomeType(AbstractType):
    __resource_type__ = "OperationOutcome"
class OperationOutcomeIssueType(AbstractType):
    __resource_type__ = "OperationOutcomeIssue"
class OrganizationType(AbstractType):
    __resource_type__ = "Organization"
class OrganizationAffiliationType(AbstractType):
    __resource_type__ = "OrganizationAffiliation"
class OrganizationContactType(AbstractType):
    __resource_type__ = "OrganizationContact"
class ParameterDefinitionType(AbstractType):
    __resource_type__ = "ParameterDefinition"
class ParametersType(AbstractType):
    __resource_type__ = "Parameters"
class ParametersParameterType(AbstractType):
    __resource_type__ = "ParametersParameter"
class PatientType(AbstractType):
    __resource_type__ = "Patient"
class PatientCommunicationType(AbstractType):
    __resource_type__ = "PatientCommunication"
class PatientContactType(AbstractType):
    __resource_type__ = "PatientContact"
class PatientLinkType(AbstractType):
    __resource_type__ = "PatientLink"
class PaymentNoticeType(AbstractType):
    __resource_type__ = "PaymentNotice"
class PaymentReconciliationType(AbstractType):
    __resource_type__ = "PaymentReconciliation"
class PaymentReconciliationDetailType(AbstractType):
    __resource_type__ = "PaymentReconciliationDetail"
class PaymentReconciliationProcessNoteType(AbstractType):
    __resource_type__ = "PaymentReconciliationProcessNote"
class PeriodType(AbstractType):
    __resource_type__ = "Period"
class PersonType(AbstractType):
    __resource_type__ = "Person"
class PersonLinkType(AbstractType):
    __resource_type__ = "PersonLink"
class PlanDefinitionType(AbstractType):
    __resource_type__ = "PlanDefinition"
class PlanDefinitionActionType(AbstractType):
    __resource_type__ = "PlanDefinitionAction"
class PlanDefinitionActionConditionType(AbstractType):
__resource_type__ = "PlanDefinitionActionCondition"
class PlanDefinitionActionDynamicValueType(AbstractType):
__resource_type__ = "PlanDefinitionActionDynamicValue"
class PlanDefinitionActionParticipantType(AbstractType):
__resource_type__ = "PlanDefinitionActionParticipant"
class PlanDefinitionActionRelatedActionType(AbstractType):
__resource_type__ = "PlanDefinitionActionRelatedAction"
class PlanDefinitionGoalType(AbstractType):
__resource_type__ = "PlanDefinitionGoal"
class PlanDefinitionGoalTargetType(AbstractType):
__resource_type__ = "PlanDefinitionGoalTarget"
class PopulationType(AbstractType):
__resource_type__ = "Population"
class PractitionerType(AbstractType):
__resource_type__ = "Practitioner"
class PractitionerQualificationType(AbstractType):
__resource_type__ = "PractitionerQualification"
class PractitionerRoleType(AbstractType):
__resource_type__ = "PractitionerRole"
class PractitionerRoleAvailableTimeType(AbstractType):
__resource_type__ = "PractitionerRoleAvailableTime"
class PractitionerRoleNotAvailableType(AbstractType):
__resource_type__ = "PractitionerRoleNotAvailable"
class ProcedureType(AbstractType):
__resource_type__ = "Procedure"
class ProcedureFocalDeviceType(AbstractType):
__resource_type__ = "ProcedureFocalDevice"
class ProcedurePerformerType(AbstractType):
__resource_type__ = "ProcedurePerformer"
class ProdCharacteristicType(AbstractType):
__resource_type__ = "ProdCharacteristic"
class ProductShelfLifeType(AbstractType):
__resource_type__ = "ProductShelfLife"
class ProvenanceType(AbstractType):
__resource_type__ = "Provenance"
class ProvenanceAgentType(AbstractType):
__resource_type__ = "ProvenanceAgent"
class ProvenanceEntityType(AbstractType):
__resource_type__ = "ProvenanceEntity"
class QuantityType(AbstractType):
__resource_type__ = "Quantity"
class QuestionnaireType(AbstractType):
__resource_type__ = "Questionnaire"
class QuestionnaireItemType(AbstractType):
__resource_type__ = "QuestionnaireItem"
class QuestionnaireItemAnswerOptionType(AbstractType):
__resource_type__ = "QuestionnaireItemAnswerOption"
class QuestionnaireItemEnableWhenType(AbstractType):
__resource_type__ = "QuestionnaireItemEnableWhen"
class QuestionnaireItemInitialType(AbstractType):
__resource_type__ = "QuestionnaireItemInitial"
class QuestionnaireResponseType(AbstractType):
__resource_type__ = "QuestionnaireResponse"
class QuestionnaireResponseItemType(AbstractType):
__resource_type__ = "QuestionnaireResponseItem"
class QuestionnaireResponseItemAnswerType(AbstractType):
__resource_type__ = "QuestionnaireResponseItemAnswer"
class RangeType(AbstractType):
__resource_type__ = "Range"
class RatioType(AbstractType):
__resource_type__ = "Ratio"
class ReferenceType(AbstractType):
__resource_type__ = "Reference"
class RelatedArtifactType(AbstractType):
__resource_type__ = "RelatedArtifact"
class RelatedPersonType(AbstractType):
__resource_type__ = "RelatedPerson"
class RelatedPersonCommunicationType(AbstractType):
__resource_type__ = "RelatedPersonCommunication"
class RequestGroupType(AbstractType):
__resource_type__ = "RequestGroup"
class RequestGroupActionType(AbstractType):
__resource_type__ = "RequestGroupAction"
class RequestGroupActionConditionType(AbstractType):
__resource_type__ = "RequestGroupActionCondition"
class RequestGroupActionRelatedActionType(AbstractType):
__resource_type__ = "RequestGroupActionRelatedAction"
class ResearchDefinitionType(AbstractType):
__resource_type__ = "ResearchDefinition"
class ResearchElementDefinitionType(AbstractType):
__resource_type__ = "ResearchElementDefinition"
class ResearchElementDefinitionCharacteristicType(AbstractType):
__resource_type__ = "ResearchElementDefinitionCharacteristic"
class ResearchStudyType(AbstractType):
__resource_type__ = "ResearchStudy"
class ResearchStudyArmType(AbstractType):
__resource_type__ = "ResearchStudyArm"
class ResearchStudyObjectiveType(AbstractType):
__resource_type__ = "ResearchStudyObjective"
class ResearchSubjectType(AbstractType):
__resource_type__ = "ResearchSubject"
class RiskAssessmentType(AbstractType):
__resource_type__ = "RiskAssessment"
class RiskAssessmentPredictionType(AbstractType):
__resource_type__ = "RiskAssessmentPrediction"
class RiskEvidenceSynthesisType(AbstractType):
__resource_type__ = "RiskEvidenceSynthesis"
class RiskEvidenceSynthesisCertaintyType(AbstractType):
__resource_type__ = "RiskEvidenceSynthesisCertainty"
class RiskEvidenceSynthesisCertaintyCertaintySubcomponentType(AbstractType):
__resource_type__ = "RiskEvidenceSynthesisCertaintyCertaintySubcomponent"
class RiskEvidenceSynthesisRiskEstimateType(AbstractType):
__resource_type__ = "RiskEvidenceSynthesisRiskEstimate"
class RiskEvidenceSynthesisRiskEstimatePrecisionEstimateType(AbstractType):
__resource_type__ = "RiskEvidenceSynthesisRiskEstimatePrecisionEstimate"
class RiskEvidenceSynthesisSampleSizeType(AbstractType):
__resource_type__ = "RiskEvidenceSynthesisSampleSize"
class SampledDataType(AbstractType):
__resource_type__ = "SampledData"
class ScheduleType(AbstractType):
__resource_type__ = "Schedule"
class SearchParameterType(AbstractType):
__resource_type__ = "SearchParameter"
class SearchParameterComponentType(AbstractType):
__resource_type__ = "SearchParameterComponent"
class ServiceRequestType(AbstractType):
__resource_type__ = "ServiceRequest"
class SignatureType(AbstractType):
__resource_type__ = "Signature"
class SlotType(AbstractType):
__resource_type__ = "Slot"
class SpecimenType(AbstractType):
__resource_type__ = "Specimen"
class SpecimenCollectionType(AbstractType):
__resource_type__ = "SpecimenCollection"
class SpecimenContainerType(AbstractType):
__resource_type__ = "SpecimenContainer"
class SpecimenDefinitionType(AbstractType):
__resource_type__ = "SpecimenDefinition"
class SpecimenDefinitionTypeTestedType(AbstractType):
__resource_type__ = "SpecimenDefinitionTypeTested"
class SpecimenDefinitionTypeTestedContainerType(AbstractType):
__resource_type__ = "SpecimenDefinitionTypeTestedContainer"
class SpecimenDefinitionTypeTestedContainerAdditiveType(AbstractType):
__resource_type__ = "SpecimenDefinitionTypeTestedContainerAdditive"
class SpecimenDefinitionTypeTestedHandlingType(AbstractType):
__resource_type__ = "SpecimenDefinitionTypeTestedHandling"
class SpecimenProcessingType(AbstractType):
__resource_type__ = "SpecimenProcessing"
class StructureDefinitionType(AbstractType):
__resource_type__ = "StructureDefinition"
class StructureDefinitionContextType(AbstractType):
__resource_type__ = "StructureDefinitionContext"
class StructureDefinitionDifferentialType(AbstractType):
__resource_type__ = "StructureDefinitionDifferential"
class StructureDefinitionMappingType(AbstractType):
__resource_type__ = "StructureDefinitionMapping"
class StructureDefinitionSnapshotType(AbstractType):
__resource_type__ = "StructureDefinitionSnapshot"
class StructureMapType(AbstractType):
__resource_type__ = "StructureMap"
class StructureMapGroupType(AbstractType):
__resource_type__ = "StructureMapGroup"
class StructureMapGroupInputType(AbstractType):
__resource_type__ = "StructureMapGroupInput"
class StructureMapGroupRuleType(AbstractType):
__resource_type__ = "StructureMapGroupRule"
class StructureMapGroupRuleDependentType(AbstractType):
__resource_type__ = "StructureMapGroupRuleDependent"
class StructureMapGroupRuleSourceType(AbstractType):
__resource_type__ = "StructureMapGroupRuleSource"
class StructureMapGroupRuleTargetType(AbstractType):
__resource_type__ = "StructureMapGroupRuleTarget"
class StructureMapGroupRuleTargetParameterType(AbstractType):
__resource_type__ = "StructureMapGroupRuleTargetParameter"
class StructureMapStructureType(AbstractType):
__resource_type__ = "StructureMapStructure"
class SubscriptionType(AbstractType):
__resource_type__ = "Subscription"
class SubscriptionChannelType(AbstractType):
__resource_type__ = "SubscriptionChannel"
class SubstanceType(AbstractType):
__resource_type__ = "Substance"
class SubstanceAmountType(AbstractType):
__resource_type__ = "SubstanceAmount"
class SubstanceAmountReferenceRangeType(AbstractType):
__resource_type__ = "SubstanceAmountReferenceRange"
class SubstanceIngredientType(AbstractType):
__resource_type__ = "SubstanceIngredient"
class SubstanceInstanceType(AbstractType):
__resource_type__ = "SubstanceInstance"
class SubstanceNucleicAcidType(AbstractType):
__resource_type__ = "SubstanceNucleicAcid"
class SubstanceNucleicAcidSubunitType(AbstractType):
__resource_type__ = "SubstanceNucleicAcidSubunit"
class SubstanceNucleicAcidSubunitLinkageType(AbstractType):
__resource_type__ = "SubstanceNucleicAcidSubunitLinkage"
class SubstanceNucleicAcidSubunitSugarType(AbstractType):
__resource_type__ = "SubstanceNucleicAcidSubunitSugar"
class SubstancePolymerType(AbstractType):
__resource_type__ = "SubstancePolymer"
class SubstancePolymerMonomerSetType(AbstractType):
__resource_type__ = "SubstancePolymerMonomerSet"
class SubstancePolymerMonomerSetStartingMaterialType(AbstractType):
__resource_type__ = "SubstancePolymerMonomerSetStartingMaterial"
class SubstancePolymerRepeatType(AbstractType):
__resource_type__ = "SubstancePolymerRepeat"
class SubstancePolymerRepeatRepeatUnitType(AbstractType):
__resource_type__ = "SubstancePolymerRepeatRepeatUnit"
class SubstancePolymerRepeatRepeatUnitDegreeOfPolymerisationType(AbstractType):
__resource_type__ = "SubstancePolymerRepeatRepeatUnitDegreeOfPolymerisation"
class SubstancePolymerRepeatRepeatUnitStructuralRepresentationType(AbstractType):
__resource_type__ = "SubstancePolymerRepeatRepeatUnitStructuralRepresentation"
class SubstanceProteinType(AbstractType):
__resource_type__ = "SubstanceProtein"
class SubstanceProteinSubunitType(AbstractType):
__resource_type__ = "SubstanceProteinSubunit"
class SubstanceReferenceInformationType(AbstractType):
__resource_type__ = "SubstanceReferenceInformation"
class SubstanceReferenceInformationClassificationType(AbstractType):
__resource_type__ = "SubstanceReferenceInformationClassification"
class SubstanceReferenceInformationGeneType(AbstractType):
__resource_type__ = "SubstanceReferenceInformationGene"
class SubstanceReferenceInformationGeneElementType(AbstractType):
__resource_type__ = "SubstanceReferenceInformationGeneElement"
class SubstanceReferenceInformationTargetType(AbstractType):
__resource_type__ = "SubstanceReferenceInformationTarget"
class SubstanceSourceMaterialType(AbstractType):
__resource_type__ = "SubstanceSourceMaterial"
class SubstanceSourceMaterialFractionDescriptionType(AbstractType):
__resource_type__ = "SubstanceSourceMaterialFractionDescription"
class SubstanceSourceMaterialOrganismType(AbstractType):
__resource_type__ = "SubstanceSourceMaterialOrganism"
class SubstanceSourceMaterialOrganismAuthorType(AbstractType):
__resource_type__ = "SubstanceSourceMaterialOrganismAuthor"
class SubstanceSourceMaterialOrganismHybridType(AbstractType):
__resource_type__ = "SubstanceSourceMaterialOrganismHybrid"
class SubstanceSourceMaterialOrganismOrganismGeneralType(AbstractType):
__resource_type__ = "SubstanceSourceMaterialOrganismOrganismGeneral"
class SubstanceSourceMaterialPartDescriptionType(AbstractType):
__resource_type__ = "SubstanceSourceMaterialPartDescription"
class SubstanceSpecificationType(AbstractType):
__resource_type__ = "SubstanceSpecification"
class SubstanceSpecificationCodeType(AbstractType):
__resource_type__ = "SubstanceSpecificationCode"
class SubstanceSpecificationMoietyType(AbstractType):
__resource_type__ = "SubstanceSpecificationMoiety"
class SubstanceSpecificationNameType(AbstractType):
__resource_type__ = "SubstanceSpecificationName"
class SubstanceSpecificationNameOfficialType(AbstractType):
__resource_type__ = "SubstanceSpecificationNameOfficial"
class SubstanceSpecificationPropertyType(AbstractType):
__resource_type__ = "SubstanceSpecificationProperty"
class SubstanceSpecificationRelationshipType(AbstractType):
__resource_type__ = "SubstanceSpecificationRelationship"
class SubstanceSpecificationStructureType(AbstractType):
__resource_type__ = "SubstanceSpecificationStructure"
class SubstanceSpecificationStructureIsotopeType(AbstractType):
__resource_type__ = "SubstanceSpecificationStructureIsotope"
class SubstanceSpecificationStructureIsotopeMolecularWeightType(AbstractType):
__resource_type__ = "SubstanceSpecificationStructureIsotopeMolecularWeight"
class SubstanceSpecificationStructureRepresentationType(AbstractType):
__resource_type__ = "SubstanceSpecificationStructureRepresentation"
class SupplyDeliveryType(AbstractType):
__resource_type__ = "SupplyDelivery"
class SupplyDeliverySuppliedItemType(AbstractType):
__resource_type__ = "SupplyDeliverySuppliedItem"
class SupplyRequestType(AbstractType):
__resource_type__ = "SupplyRequest"
class SupplyRequestParameterType(AbstractType):
__resource_type__ = "SupplyRequestParameter"
class TaskType(AbstractType):
__resource_type__ = "Task"
class TaskInputType(AbstractType):
__resource_type__ = "TaskInput"
class TaskOutputType(AbstractType):
__resource_type__ = "TaskOutput"
class TaskRestrictionType(AbstractType):
__resource_type__ = "TaskRestriction"
class TerminologyCapabilitiesType(AbstractType):
__resource_type__ = "TerminologyCapabilities"
class TerminologyCapabilitiesClosureType(AbstractType):
__resource_type__ = "TerminologyCapabilitiesClosure"
class TerminologyCapabilitiesCodeSystemType(AbstractType):
__resource_type__ = "TerminologyCapabilitiesCodeSystem"
class TerminologyCapabilitiesCodeSystemVersionType(AbstractType):
__resource_type__ = "TerminologyCapabilitiesCodeSystemVersion"
class TerminologyCapabilitiesCodeSystemVersionFilterType(AbstractType):
__resource_type__ = "TerminologyCapabilitiesCodeSystemVersionFilter"
class TerminologyCapabilitiesExpansionType(AbstractType):
__resource_type__ = "TerminologyCapabilitiesExpansion"
class TerminologyCapabilitiesExpansionParameterType(AbstractType):
__resource_type__ = "TerminologyCapabilitiesExpansionParameter"
class TerminologyCapabilitiesImplementationType(AbstractType):
__resource_type__ = "TerminologyCapabilitiesImplementation"
class TerminologyCapabilitiesSoftwareType(AbstractType):
__resource_type__ = "TerminologyCapabilitiesSoftware"
class TerminologyCapabilitiesTranslationType(AbstractType):
__resource_type__ = "TerminologyCapabilitiesTranslation"
class TerminologyCapabilitiesValidateCodeType(AbstractType):
__resource_type__ = "TerminologyCapabilitiesValidateCode"
class TestReportType(AbstractType):
__resource_type__ = "TestReport"
class TestReportParticipantType(AbstractType):
__resource_type__ = "TestReportParticipant"
class TestReportSetupType(AbstractType):
__resource_type__ = "TestReportSetup"
class TestReportSetupActionType(AbstractType):
__resource_type__ = "TestReportSetupAction"
class TestReportSetupActionAssertType(AbstractType):
__resource_type__ = "TestReportSetupActionAssert"
class TestReportSetupActionOperationType(AbstractType):
__resource_type__ = "TestReportSetupActionOperation"
class TestReportTeardownType(AbstractType):
__resource_type__ = "TestReportTeardown"
class TestReportTeardownActionType(AbstractType):
__resource_type__ = "TestReportTeardownAction"
class TestReportTestType(AbstractType):
__resource_type__ = "TestReportTest"
class TestReportTestActionType(AbstractType):
__resource_type__ = "TestReportTestAction"
class TestScriptType(AbstractType):
__resource_type__ = "TestScript"
class TestScriptDestinationType(AbstractType):
__resource_type__ = "TestScriptDestination"
class TestScriptFixtureType(AbstractType):
__resource_type__ = "TestScriptFixture"
class TestScriptMetadataType(AbstractType):
__resource_type__ = "TestScriptMetadata"
class TestScriptMetadataCapabilityType(AbstractType):
__resource_type__ = "TestScriptMetadataCapability"
class TestScriptMetadataLinkType(AbstractType):
__resource_type__ = "TestScriptMetadataLink"
class TestScriptOriginType(AbstractType):
__resource_type__ = "TestScriptOrigin"
class TestScriptSetupType(AbstractType):
__resource_type__ = "TestScriptSetup"
class TestScriptSetupActionType(AbstractType):
__resource_type__ = "TestScriptSetupAction"
class TestScriptSetupActionAssertType(AbstractType):
__resource_type__ = "TestScriptSetupActionAssert"
class TestScriptSetupActionOperationType(AbstractType):
__resource_type__ = "TestScriptSetupActionOperation"
class TestScriptSetupActionOperationRequestHeaderType(AbstractType):
__resource_type__ = "TestScriptSetupActionOperationRequestHeader"
class TestScriptTeardownType(AbstractType):
__resource_type__ = "TestScriptTeardown"
class TestScriptTeardownActionType(AbstractType):
__resource_type__ = "TestScriptTeardownAction"
class TestScriptTestType(AbstractType):
__resource_type__ = "TestScriptTest"
class TestScriptTestActionType(AbstractType):
__resource_type__ = "TestScriptTestAction"
class TestScriptVariableType(AbstractType):
__resource_type__ = "TestScriptVariable"
class TimingType(AbstractType):
__resource_type__ = "Timing"
class TimingRepeatType(AbstractType):
__resource_type__ = "TimingRepeat"
class TriggerDefinitionType(AbstractType):
__resource_type__ = "TriggerDefinition"
class UsageContextType(AbstractType):
__resource_type__ = "UsageContext"
class ValueSetType(AbstractType):
__resource_type__ = "ValueSet"
class ValueSetComposeType(AbstractType):
__resource_type__ = "ValueSetCompose"
class ValueSetComposeIncludeType(AbstractType):
__resource_type__ = "ValueSetComposeInclude"
class ValueSetComposeIncludeConceptType(AbstractType):
__resource_type__ = "ValueSetComposeIncludeConcept"
class ValueSetComposeIncludeConceptDesignationType(AbstractType):
__resource_type__ = "ValueSetComposeIncludeConceptDesignation"
class ValueSetComposeIncludeFilterType(AbstractType):
__resource_type__ = "ValueSetComposeIncludeFilter"
class ValueSetExpansionType(AbstractType):
__resource_type__ = "ValueSetExpansion"
class ValueSetExpansionContainsType(AbstractType):
__resource_type__ = "ValueSetExpansionContains"
class ValueSetExpansionParameterType(AbstractType):
__resource_type__ = "ValueSetExpansionParameter"
class VerificationResultType(AbstractType):
__resource_type__ = "VerificationResult"
class VerificationResultAttestationType(AbstractType):
__resource_type__ = "VerificationResultAttestation"
class VerificationResultPrimarySourceType(AbstractType):
__resource_type__ = "VerificationResultPrimarySource"
class VerificationResultValidatorType(AbstractType):
__resource_type__ = "VerificationResultValidator"
class VisionPrescriptionType(AbstractType):
__resource_type__ = "VisionPrescription"
class VisionPrescriptionLensSpecificationType(AbstractType):
__resource_type__ = "VisionPrescriptionLensSpecification"
class VisionPrescriptionLensSpecificationPrismType(AbstractType):
__resource_type__ = "VisionPrescriptionLensSpecificationPrism"
__all__ = [
"Boolean",
"String",
"Base64Binary",
"Code",
"Id",
"Decimal",
"Integer",
"UnsignedInt",
"PositiveInt",
"Uri",
"Oid",
"Uuid",
"Canonical",
"Url",
"Markdown",
"Xhtml",
"Date",
"DateTime",
"Instant",
"Time",
"FHIRPrimitiveExtensionType",
"ElementType",
"ResourceType",
"AccountType",
"AccountCoverageType",
"AccountGuarantorType",
"ActivityDefinitionType",
"ActivityDefinitionDynamicValueType",
"ActivityDefinitionParticipantType",
"AddressType",
"AdverseEventType",
"AdverseEventSuspectEntityType",
"AdverseEventSuspectEntityCausalityType",
"AgeType",
"AllergyIntoleranceType",
"AllergyIntoleranceReactionType",
"AnnotationType",
"AppointmentType",
"AppointmentParticipantType",
"AppointmentResponseType",
"AttachmentType",
"AuditEventType",
"AuditEventAgentType",
"AuditEventAgentNetworkType",
"AuditEventEntityType",
"AuditEventEntityDetailType",
"AuditEventSourceType",
"BackboneElementType",
"BasicType",
"BinaryType",
"BiologicallyDerivedProductType",
"BiologicallyDerivedProductCollectionType",
"BiologicallyDerivedProductManipulationType",
"BiologicallyDerivedProductProcessingType",
"BiologicallyDerivedProductStorageType",
"BodyStructureType",
"BundleType",
"BundleEntryType",
"BundleEntryRequestType",
"BundleEntryResponseType",
"BundleEntrySearchType",
"BundleLinkType",
"CapabilityStatementType",
"CapabilityStatementDocumentType",
"CapabilityStatementImplementationType",
"CapabilityStatementMessagingType",
"CapabilityStatementMessagingEndpointType",
"CapabilityStatementMessagingSupportedMessageType",
"CapabilityStatementRestType",
"CapabilityStatementRestInteractionType",
"CapabilityStatementRestResourceType",
"CapabilityStatementRestResourceInteractionType",
"CapabilityStatementRestResourceOperationType",
"CapabilityStatementRestResourceSearchParamType",
"CapabilityStatementRestSecurityType",
"CapabilityStatementSoftwareType",
"CarePlanType",
"CarePlanActivityType",
"CarePlanActivityDetailType",
"CareTeamType",
"CareTeamParticipantType",
"CatalogEntryType",
"CatalogEntryRelatedEntryType",
"ChargeItemType",
"ChargeItemDefinitionType",
"ChargeItemDefinitionApplicabilityType",
"ChargeItemDefinitionPropertyGroupType",
"ChargeItemDefinitionPropertyGroupPriceComponentType",
"ChargeItemPerformerType",
"ClaimType",
"ClaimAccidentType",
"ClaimCareTeamType",
"ClaimDiagnosisType",
"ClaimInsuranceType",
"ClaimItemType",
"ClaimItemDetailType",
"ClaimItemDetailSubDetailType",
"ClaimPayeeType",
"ClaimProcedureType",
"ClaimRelatedType",
"ClaimResponseType",
"ClaimResponseAddItemType",
"ClaimResponseAddItemDetailType",
"ClaimResponseAddItemDetailSubDetailType",
"ClaimResponseErrorType",
"ClaimResponseInsuranceType",
"ClaimResponseItemType",
"ClaimResponseItemAdjudicationType",
"ClaimResponseItemDetailType",
"ClaimResponseItemDetailSubDetailType",
"ClaimResponsePaymentType",
"ClaimResponseProcessNoteType",
"ClaimResponseTotalType",
"ClaimSupportingInfoType",
"ClinicalImpressionType",
"ClinicalImpressionFindingType",
"ClinicalImpressionInvestigationType",
"CodeSystemType",
"CodeSystemConceptType",
"CodeSystemConceptDesignationType",
"CodeSystemConceptPropertyType",
"CodeSystemFilterType",
"CodeSystemPropertyType",
"CodeableConceptType",
"CodingType",
"CommunicationType",
"CommunicationPayloadType",
"CommunicationRequestType",
"CommunicationRequestPayloadType",
"CompartmentDefinitionType",
"CompartmentDefinitionResourceType",
"CompositionType",
"CompositionAttesterType",
"CompositionEventType",
"CompositionRelatesToType",
"CompositionSectionType",
"ConceptMapType",
"ConceptMapGroupType",
"ConceptMapGroupElementType",
"ConceptMapGroupElementTargetType",
"ConceptMapGroupElementTargetDependsOnType",
"ConceptMapGroupUnmappedType",
"ConditionType",
"ConditionEvidenceType",
"ConditionStageType",
"ConsentType",
"ConsentPolicyType",
"ConsentProvisionType",
"ConsentProvisionActorType",
"ConsentProvisionDataType",
"ConsentVerificationType",
"ContactDetailType",
"ContactPointType",
"ContractType",
"ContractContentDefinitionType",
"ContractFriendlyType",
"ContractLegalType",
"ContractRuleType",
"ContractSignerType",
"ContractTermType",
"ContractTermActionType",
"ContractTermActionSubjectType",
"ContractTermAssetType",
"ContractTermAssetContextType",
"ContractTermAssetValuedItemType",
"ContractTermOfferType",
"ContractTermOfferAnswerType",
"ContractTermOfferPartyType",
"ContractTermSecurityLabelType",
"ContributorType",
"CountType",
"CoverageType",
"CoverageClassType",
"CoverageCostToBeneficiaryType",
"CoverageCostToBeneficiaryExceptionType",
"CoverageEligibilityRequestType",
"CoverageEligibilityRequestInsuranceType",
"CoverageEligibilityRequestItemType",
"CoverageEligibilityRequestItemDiagnosisType",
"CoverageEligibilityRequestSupportingInfoType",
"CoverageEligibilityResponseType",
"CoverageEligibilityResponseErrorType",
"CoverageEligibilityResponseInsuranceType",
"CoverageEligibilityResponseInsuranceItemType",
"CoverageEligibilityResponseInsuranceItemBenefitType",
"DataRequirementType",
"DataRequirementCodeFilterType",
"DataRequirementDateFilterType",
"DataRequirementSortType",
"DetectedIssueType",
"DetectedIssueEvidenceType",
"DetectedIssueMitigationType",
"DeviceType",
"DeviceDefinitionType",
"DeviceDefinitionCapabilityType",
"DeviceDefinitionDeviceNameType",
"DeviceDefinitionMaterialType",
"DeviceDefinitionPropertyType",
"DeviceDefinitionSpecializationType",
"DeviceDefinitionUdiDeviceIdentifierType",
"DeviceDeviceNameType",
"DeviceMetricType",
"DeviceMetricCalibrationType",
"DevicePropertyType",
"DeviceRequestType",
"DeviceRequestParameterType",
"DeviceSpecializationType",
"DeviceUdiCarrierType",
"DeviceUseStatementType",
"DeviceVersionType",
"DiagnosticReportType",
"DiagnosticReportMediaType",
"DistanceType",
"DocumentManifestType",
"DocumentManifestRelatedType",
"DocumentReferenceType",
"DocumentReferenceContentType",
"DocumentReferenceContextType",
"DocumentReferenceRelatesToType",
"DomainResourceType",
"DosageType",
"DosageDoseAndRateType",
"DurationType",
"EffectEvidenceSynthesisType",
"EffectEvidenceSynthesisCertaintyType",
"EffectEvidenceSynthesisCertaintyCertaintySubcomponentType",
"EffectEvidenceSynthesisEffectEstimateType",
"EffectEvidenceSynthesisEffectEstimatePrecisionEstimateType",
"EffectEvidenceSynthesisResultsByExposureType",
"EffectEvidenceSynthesisSampleSizeType",
"ElementDefinitionType",
"ElementDefinitionBaseType",
"ElementDefinitionBindingType",
"ElementDefinitionConstraintType",
"ElementDefinitionExampleType",
"ElementDefinitionMappingType",
"ElementDefinitionSlicingType",
"ElementDefinitionSlicingDiscriminatorType",
"ElementDefinitionTypeType",
"EncounterType",
"EncounterClassHistoryType",
"EncounterDiagnosisType",
"EncounterHospitalizationType",
"EncounterLocationType",
"EncounterParticipantType",
"EncounterStatusHistoryType",
"EndpointType",
"EnrollmentRequestType",
"EnrollmentResponseType",
"EpisodeOfCareType",
"EpisodeOfCareDiagnosisType",
"EpisodeOfCareStatusHistoryType",
"EventDefinitionType",
"EvidenceType",
"EvidenceVariableType",
"EvidenceVariableCharacteristicType",
"ExampleScenarioType",
"ExampleScenarioActorType",
"ExampleScenarioInstanceType",
"ExampleScenarioInstanceContainedInstanceType",
"ExampleScenarioInstanceVersionType",
"ExampleScenarioProcessType",
"ExampleScenarioProcessStepType",
"ExampleScenarioProcessStepAlternativeType",
"ExampleScenarioProcessStepOperationType",
"ExplanationOfBenefitType",
"ExplanationOfBenefitAccidentType",
"ExplanationOfBenefitAddItemType",
"ExplanationOfBenefitAddItemDetailType",
"ExplanationOfBenefitAddItemDetailSubDetailType",
"ExplanationOfBenefitBenefitBalanceType",
"ExplanationOfBenefitBenefitBalanceFinancialType",
"ExplanationOfBenefitCareTeamType",
"ExplanationOfBenefitDiagnosisType",
"ExplanationOfBenefitInsuranceType",
"ExplanationOfBenefitItemType",
"ExplanationOfBenefitItemAdjudicationType",
"ExplanationOfBenefitItemDetailType",
"ExplanationOfBenefitItemDetailSubDetailType",
"ExplanationOfBenefitPayeeType",
"ExplanationOfBenefitPaymentType",
"ExplanationOfBenefitProcedureType",
"ExplanationOfBenefitProcessNoteType",
"ExplanationOfBenefitRelatedType",
"ExplanationOfBenefitSupportingInfoType",
"ExplanationOfBenefitTotalType",
"ExpressionType",
"ExtensionType",
"FamilyMemberHistoryType",
"FamilyMemberHistoryConditionType",
"FlagType",
"GoalType",
"GoalTargetType",
"GraphDefinitionType",
"GraphDefinitionLinkType",
"GraphDefinitionLinkTargetType",
"GraphDefinitionLinkTargetCompartmentType",
"GroupType",
"GroupCharacteristicType",
"GroupMemberType",
"GuidanceResponseType",
"HealthcareServiceType",
"HealthcareServiceAvailableTimeType",
"HealthcareServiceEligibilityType",
"HealthcareServiceNotAvailableType",
"HumanNameType",
"IdentifierType",
"ImagingStudyType",
"ImagingStudySeriesType",
"ImagingStudySeriesInstanceType",
"ImagingStudySeriesPerformerType",
"ImmunizationType",
"ImmunizationEducationType",
"ImmunizationEvaluationType",
"ImmunizationPerformerType",
"ImmunizationProtocolAppliedType",
"ImmunizationReactionType",
"ImmunizationRecommendationType",
"ImmunizationRecommendationRecommendationType",
"ImmunizationRecommendationRecommendationDateCriterionType",
"ImplementationGuideType",
"ImplementationGuideDefinitionType",
"ImplementationGuideDefinitionGroupingType",
"ImplementationGuideDefinitionPageType",
"ImplementationGuideDefinitionParameterType",
"ImplementationGuideDefinitionResourceType",
"ImplementationGuideDefinitionTemplateType",
"ImplementationGuideDependsOnType",
"ImplementationGuideGlobalType",
"ImplementationGuideManifestType",
"ImplementationGuideManifestPageType",
"ImplementationGuideManifestResourceType",
"InsurancePlanType",
"InsurancePlanContactType",
"InsurancePlanCoverageType",
"InsurancePlanCoverageBenefitType",
"InsurancePlanCoverageBenefitLimitType",
"InsurancePlanPlanType",
"InsurancePlanPlanGeneralCostType",
"InsurancePlanPlanSpecificCostType",
"InsurancePlanPlanSpecificCostBenefitType",
"InsurancePlanPlanSpecificCostBenefitCostType",
"InvoiceType",
"InvoiceLineItemType",
"InvoiceLineItemPriceComponentType",
"InvoiceParticipantType",
"LibraryType",
"LinkageType",
"LinkageItemType",
"ListType",
"ListEntryType",
"LocationType",
"LocationHoursOfOperationType",
"LocationPositionType",
"MarketingStatusType",
"MeasureType",
"MeasureGroupType",
"MeasureGroupPopulationType",
"MeasureGroupStratifierType",
"MeasureGroupStratifierComponentType",
"MeasureReportType",
"MeasureReportGroupType",
"MeasureReportGroupPopulationType",
"MeasureReportGroupStratifierType",
"MeasureReportGroupStratifierStratumType",
"MeasureReportGroupStratifierStratumComponentType",
"MeasureReportGroupStratifierStratumPopulationType",
"MeasureSupplementalDataType",
"MediaType",
"MedicationType",
"MedicationAdministrationType",
"MedicationAdministrationDosageType",
"MedicationAdministrationPerformerType",
"MedicationBatchType",
"MedicationDispenseType",
"MedicationDispensePerformerType",
"MedicationDispenseSubstitutionType",
"MedicationIngredientType",
"MedicationKnowledgeType",
"MedicationKnowledgeAdministrationGuidelinesType",
"MedicationKnowledgeAdministrationGuidelinesDosageType",
"MedicationKnowledgeAdministrationGuidelinesPatientCharacteristicsType",
"MedicationKnowledgeCostType",
"MedicationKnowledgeDrugCharacteristicType",
"MedicationKnowledgeIngredientType",
"MedicationKnowledgeKineticsType",
"MedicationKnowledgeMedicineClassificationType",
"MedicationKnowledgeMonitoringProgramType",
"MedicationKnowledgeMonographType",
"MedicationKnowledgePackagingType",
"MedicationKnowledgeRegulatoryType",
"MedicationKnowledgeRegulatoryMaxDispenseType",
"MedicationKnowledgeRegulatoryScheduleType",
"MedicationKnowledgeRegulatorySubstitutionType",
"MedicationKnowledgeRelatedMedicationKnowledgeType",
"MedicationRequestType",
"MedicationRequestDispenseRequestType",
"MedicationRequestDispenseRequestInitialFillType",
"MedicationRequestSubstitutionType",
"MedicationStatementType",
"MedicinalProductType",
"MedicinalProductAuthorizationType",
"MedicinalProductAuthorizationJurisdictionalAuthorizationType",
"MedicinalProductAuthorizationProcedureType",
"MedicinalProductContraindicationType",
"MedicinalProductContraindicationOtherTherapyType",
"MedicinalProductIndicationType",
"MedicinalProductIndicationOtherTherapyType",
"MedicinalProductIngredientType",
"MedicinalProductIngredientSpecifiedSubstanceType",
"MedicinalProductIngredientSpecifiedSubstanceStrengthType",
"MedicinalProductIngredientSpecifiedSubstanceStrengthReferenceStrengthType",
"MedicinalProductIngredientSubstanceType",
"MedicinalProductInteractionType",
"MedicinalProductInteractionInteractantType",
"MedicinalProductManufacturedType",
"MedicinalProductManufacturingBusinessOperationType",
"MedicinalProductNameType",
"MedicinalProductNameCountryLanguageType",
"MedicinalProductNameNamePartType",
"MedicinalProductPackagedType",
"MedicinalProductPackagedBatchIdentifierType",
"MedicinalProductPackagedPackageItemType",
"MedicinalProductPharmaceuticalType",
"MedicinalProductPharmaceuticalCharacteristicsType",
"MedicinalProductPharmaceuticalRouteOfAdministrationType",
"MedicinalProductPharmaceuticalRouteOfAdministrationTargetSpeciesType",
"MedicinalProductPharmaceuticalRouteOfAdministrationTargetSpeciesWithdrawalPeriodType",
"MedicinalProductSpecialDesignationType",
"MedicinalProductUndesirableEffectType",
"MessageDefinitionType",
"MessageDefinitionAllowedResponseType",
"MessageDefinitionFocusType",
"MessageHeaderType",
"MessageHeaderDestinationType",
"MessageHeaderResponseType",
"MessageHeaderSourceType",
"MetaType",
"MetadataResourceType",
"MolecularSequenceType",
"MolecularSequenceQualityType",
"MolecularSequenceQualityRocType",
"MolecularSequenceReferenceSeqType",
"MolecularSequenceRepositoryType",
"MolecularSequenceStructureVariantType",
"MolecularSequenceStructureVariantInnerType",
"MolecularSequenceStructureVariantOuterType",
"MolecularSequenceVariantType",
"MoneyType",
"NamingSystemType",
"NamingSystemUniqueIdType",
"NarrativeType",
"NutritionOrderType",
"NutritionOrderEnteralFormulaType",
"NutritionOrderEnteralFormulaAdministrationType",
"NutritionOrderOralDietType",
"NutritionOrderOralDietNutrientType",
"NutritionOrderOralDietTextureType",
"NutritionOrderSupplementType",
"ObservationType",
"ObservationComponentType",
"ObservationDefinitionType",
"ObservationDefinitionQualifiedIntervalType",
"ObservationDefinitionQuantitativeDetailsType",
"ObservationReferenceRangeType",
"OperationDefinitionType",
"OperationDefinitionOverloadType",
"OperationDefinitionParameterType",
"OperationDefinitionParameterBindingType",
"OperationDefinitionParameterReferencedFromType",
"OperationOutcomeType",
"OperationOutcomeIssueType",
"OrganizationType",
"OrganizationAffiliationType",
"OrganizationContactType",
"ParameterDefinitionType",
"ParametersType",
"ParametersParameterType",
"PatientType",
"PatientCommunicationType",
"PatientContactType",
"PatientLinkType",
"PaymentNoticeType",
"PaymentReconciliationType",
"PaymentReconciliationDetailType",
"PaymentReconciliationProcessNoteType",
"PeriodType",
"PersonType",
"PersonLinkType",
"PlanDefinitionType",
"PlanDefinitionActionType",
"PlanDefinitionActionConditionType",
"PlanDefinitionActionDynamicValueType",
"PlanDefinitionActionParticipantType",
"PlanDefinitionActionRelatedActionType",
"PlanDefinitionGoalType",
"PlanDefinitionGoalTargetType",
"PopulationType",
"PractitionerType",
"PractitionerQualificationType",
"PractitionerRoleType",
"PractitionerRoleAvailableTimeType",
"PractitionerRoleNotAvailableType",
"ProcedureType",
"ProcedureFocalDeviceType",
"ProcedurePerformerType",
"ProdCharacteristicType",
"ProductShelfLifeType",
"ProvenanceType",
"ProvenanceAgentType",
"ProvenanceEntityType",
"QuantityType",
"QuestionnaireType",
"QuestionnaireItemType",
"QuestionnaireItemAnswerOptionType",
"QuestionnaireItemEnableWhenType",
"QuestionnaireItemInitialType",
"QuestionnaireResponseType",
"QuestionnaireResponseItemType",
"QuestionnaireResponseItemAnswerType",
"RangeType",
"RatioType",
"ReferenceType",
"RelatedArtifactType",
"RelatedPersonType",
"RelatedPersonCommunicationType",
"RequestGroupType",
"RequestGroupActionType",
"RequestGroupActionConditionType",
"RequestGroupActionRelatedActionType",
"ResearchDefinitionType",
"ResearchElementDefinitionType",
"ResearchElementDefinitionCharacteristicType",
"ResearchStudyType",
"ResearchStudyArmType",
"ResearchStudyObjectiveType",
"ResearchSubjectType",
"RiskAssessmentType",
"RiskAssessmentPredictionType",
"RiskEvidenceSynthesisType",
"RiskEvidenceSynthesisCertaintyType",
"RiskEvidenceSynthesisCertaintyCertaintySubcomponentType",
"RiskEvidenceSynthesisRiskEstimateType",
"RiskEvidenceSynthesisRiskEstimatePrecisionEstimateType",
"RiskEvidenceSynthesisSampleSizeType",
"SampledDataType",
"ScheduleType",
"SearchParameterType",
"SearchParameterComponentType",
"ServiceRequestType",
"SignatureType",
"SlotType",
"SpecimenType",
"SpecimenCollectionType",
"SpecimenContainerType",
"SpecimenDefinitionType",
"SpecimenDefinitionTypeTestedType",
"SpecimenDefinitionTypeTestedContainerType",
"SpecimenDefinitionTypeTestedContainerAdditiveType",
"SpecimenDefinitionTypeTestedHandlingType",
"SpecimenProcessingType",
"StructureDefinitionType",
"StructureDefinitionContextType",
"StructureDefinitionDifferentialType",
"StructureDefinitionMappingType",
"StructureDefinitionSnapshotType",
"StructureMapType",
"StructureMapGroupType",
"StructureMapGroupInputType",
"StructureMapGroupRuleType",
"StructureMapGroupRuleDependentType",
"StructureMapGroupRuleSourceType",
"StructureMapGroupRuleTargetType",
"StructureMapGroupRuleTargetParameterType",
"StructureMapStructureType",
"SubscriptionType",
"SubscriptionChannelType",
"SubstanceType",
"SubstanceAmountType",
"SubstanceAmountReferenceRangeType",
"SubstanceIngredientType",
"SubstanceInstanceType",
"SubstanceNucleicAcidType",
"SubstanceNucleicAcidSubunitType",
"SubstanceNucleicAcidSubunitLinkageType",
"SubstanceNucleicAcidSubunitSugarType",
"SubstancePolymerType",
"SubstancePolymerMonomerSetType",
"SubstancePolymerMonomerSetStartingMaterialType",
"SubstancePolymerRepeatType",
"SubstancePolymerRepeatRepeatUnitType",
"SubstancePolymerRepeatRepeatUnitDegreeOfPolymerisationType",
"SubstancePolymerRepeatRepeatUnitStructuralRepresentationType",
"SubstanceProteinType",
"SubstanceProteinSubunitType",
"SubstanceReferenceInformationType",
"SubstanceReferenceInformationClassificationType",
"SubstanceReferenceInformationGeneType",
"SubstanceReferenceInformationGeneElementType",
"SubstanceReferenceInformationTargetType",
"SubstanceSourceMaterialType",
"SubstanceSourceMaterialFractionDescriptionType",
"SubstanceSourceMaterialOrganismType",
"SubstanceSourceMaterialOrganismAuthorType",
"SubstanceSourceMaterialOrganismHybridType",
"SubstanceSourceMaterialOrganismOrganismGeneralType",
"SubstanceSourceMaterialPartDescriptionType",
"SubstanceSpecificationType",
"SubstanceSpecificationCodeType",
"SubstanceSpecificationMoietyType",
"SubstanceSpecificationNameType",
"SubstanceSpecificationNameOfficialType",
"SubstanceSpecificationPropertyType",
"SubstanceSpecificationRelationshipType",
"SubstanceSpecificationStructureType",
"SubstanceSpecificationStructureIsotopeType",
"SubstanceSpecificationStructureIsotopeMolecularWeightType",
"SubstanceSpecificationStructureRepresentationType",
"SupplyDeliveryType",
"SupplyDeliverySuppliedItemType",
"SupplyRequestType",
"SupplyRequestParameterType",
"TaskType",
"TaskInputType",
"TaskOutputType",
"TaskRestrictionType",
"TerminologyCapabilitiesType",
"TerminologyCapabilitiesClosureType",
"TerminologyCapabilitiesCodeSystemType",
"TerminologyCapabilitiesCodeSystemVersionType",
"TerminologyCapabilitiesCodeSystemVersionFilterType",
"TerminologyCapabilitiesExpansionType",
"TerminologyCapabilitiesExpansionParameterType",
"TerminologyCapabilitiesImplementationType",
"TerminologyCapabilitiesSoftwareType",
"TerminologyCapabilitiesTranslationType",
"TerminologyCapabilitiesValidateCodeType",
"TestReportType",
"TestReportParticipantType",
"TestReportSetupType",
"TestReportSetupActionType",
"TestReportSetupActionAssertType",
"TestReportSetupActionOperationType",
"TestReportTeardownType",
"TestReportTeardownActionType",
"TestReportTestType",
"TestReportTestActionType",
"TestScriptType",
"TestScriptDestinationType",
"TestScriptFixtureType",
"TestScriptMetadataType",
"TestScriptMetadataCapabilityType",
"TestScriptMetadataLinkType",
"TestScriptOriginType",
"TestScriptSetupType",
"TestScriptSetupActionType",
"TestScriptSetupActionAssertType",
"TestScriptSetupActionOperationType",
"TestScriptSetupActionOperationRequestHeaderType",
"TestScriptTeardownType",
"TestScriptTeardownActionType",
"TestScriptTestType",
"TestScriptTestActionType",
"TestScriptVariableType",
"TimingType",
"TimingRepeatType",
"TriggerDefinitionType",
"UsageContextType",
"ValueSetType",
"ValueSetComposeType",
"ValueSetComposeIncludeType",
"ValueSetComposeIncludeConceptType",
"ValueSetComposeIncludeConceptDesignationType",
"ValueSetComposeIncludeFilterType",
"ValueSetExpansionType",
"ValueSetExpansionContainsType",
"ValueSetExpansionParameterType",
"VerificationResultType",
"VerificationResultAttestationType",
"VerificationResultPrimarySourceType",
"VerificationResultValidatorType",
"VisionPrescriptionType",
"VisionPrescriptionLensSpecificationType",
"VisionPrescriptionLensSpecificationPrismType",
]
| 27.914115
| 119
| 0.789266
|
4353b06a0ce18b0487c06aeaaa36b25441f0234f
| 3,087
|
py
|
Python
|
SGD2.py
|
parrt/gmdh
|
77d54c35ed22e007098c6105066b6bae18ed364a
|
[
"BSD-2-Clause"
] | 3
|
2017-02-09T14:34:49.000Z
|
2020-03-23T15:50:08.000Z
|
SGD2.py
|
parrt/gmdh
|
77d54c35ed22e007098c6105066b6bae18ed364a
|
[
"BSD-2-Clause"
] | 1
|
2017-05-23T06:08:12.000Z
|
2017-05-23T17:42:10.000Z
|
SGD2.py
|
parrt/gmdh
|
77d54c35ed22e007098c6105066b6bae18ed364a
|
[
"BSD-2-Clause"
] | 1
|
2019-11-20T20:56:31.000Z
|
2019-11-20T20:56:31.000Z
|
import numpy as np
import gzip, cPickle
from numpy import linalg as LA
from collections import Counter
from decimal import Decimal
import random
from network2 import Network2, init_index_map
# Load the dataset
# (pickled MNIST: a (train, valid, test) tuple, each a pair of (images, labels)).
# NOTE(review): Python 2 code (print statements, cPickle) — confirm target interpreter.
f = gzip.open('/Users/parrt/data/mnist.pkl.gz', 'rb')
train_set, valid_set, test_set = cPickle.load(f)
f.close()
images = train_set[0]
labels = train_set[1]
img = images[1]
# use just a few images
N = 1
# N = len(images)
X = images[0:N]
Y = labels[0:N]
# Make one-hot-vectors
# Y = [onehot(lab) for lab in labels[0:N]]
print "N =",N
# init_index_map([784,15,10])
# Build the global parameter-index map and a 784-15-10 network.
init_index_map([784,15,10])
pos = Network2([784,15,10])
num_parameters = pos.size()
print "num parameters =", num_parameters
# SGD hyperparameters.
precision = 0.000000000001  # convergence threshold (currently unused: the stop test below is commented out)
eta = 40                    # learning rate
steps = 0                   # iteration counter
h = 0.00001                 # finite-difference step size
cost = 1e20                 # sentinel "infinite" starting cost
NPARTIALS = 1               # number of randomly chosen partials updated per step
MINIBATCH = 30              # minibatch size (currently unused: full batch is used in the loop)
print "NPARTIALS =", NPARTIALS
print "MINIBATCH =", MINIBATCH
print "eta =", eta
def compute_finite_diff(pos, d):
    """Estimate the partial derivative of the network cost with respect to
    parameter index d via the central difference (f(p+h) - f(p-h)) / (2h).

    Relies on module-level globals `samples`, `sample_labels`, and step `h`.
    The parameter is restored to its original value before returning.
    """
    original = pos.get_parameter(d)
    # Evaluate cost one step to the right of the current position.
    pos.add_to_parameter(d, h)
    cost_plus = pos.cost(samples, sample_labels)
    # Reset, then evaluate one step to the left.
    pos.set_parameter(d, original)
    pos.add_to_parameter(d, -h)
    cost_minus = pos.cost(samples, sample_labels)
    pos.set_parameter(d, original)  # restore position vector
    return (cost_plus - cost_minus) / (2 * h)
# Stochastic coordinate descent: each iteration nudges NPARTIALS randomly
# chosen parameters along their estimated negative gradient.
# NOTE(review): the loop has no active termination condition — the
# convergence break at the bottom is commented out, so it runs forever.
while True:
    steps += 1
    prevcost = cost
    # what is cost at current location?
    # indexes = np.random.randint(0,len(X),size=MINIBATCH)
    # samples = X[indexes]
    # sample_labels = labels[indexes]
    samples = X              # full batch: the minibatch sampling above is disabled
    sample_labels = labels
    # compute finite difference for one parameter
    # (f(pos+h) - f(pos-h)) / 2h
    save = [0]*NPARTIALS
    d = [0]*NPARTIALS
    for i in range(NPARTIALS):
        d[i] = random.randint(0,num_parameters-1) # randint() is inclusive on both ends
        save[i] = pos.get_parameter(d[i])         # remember value so the step can be undone
        finite_diff = compute_finite_diff(pos,d[i])
        # move position in one direction
        pos.add_to_parameter(d[i], -eta * finite_diff)
    # delta = Decimal(cost) - Decimal(prevcost)
    cost = pos.cost(samples, sample_labels) # what is new cost
    # Periodic progress report (every 2000 steps).
    if steps % 2000 == 0:
        correct = pos.fitness(X,Y)
        print "%d: cost = %3.5f, correct %d, weight norm neuron 0,0: %3.3f" % \
              (steps, cost, correct, LA.norm(pos.weights[0][0]))
        # print "%d: cost = %3.5f, weight norm neuron 0,0: %3.3f" %\
        #       (steps,cost,LA.norm(pos.weights[0][0]))
    # If the cost jumped up too much, undo this step's parameter changes.
    if cost > prevcost:
        lossratio = (cost - prevcost) / prevcost
        if lossratio > 0.035: # even sigmoid seems to get these weird pop ups in energy so don't let it
            # print "lossratio by %3.5f" % lossratio
            for i in range(NPARTIALS):
                pos.set_parameter(d[i], save[i]) # restore so we can try again
            # print "resetting to cost %3.5f from pop up %3.5f" % (prevcost,cost)
            cost = prevcost # reset cost too lest it think it hadn't jumped much next iteration

    # stop when small change in vertical but not heading down
    # Sometimes subtraction wipes out precision and we get an actual 0.0
    # if delta >= 0 and abs(delta) < precision:
    #     break
| 30.564356
| 103
| 0.646259
|
1d9c761f3c83798fd6723d0da3f054681695f01a
| 5,417
|
py
|
Python
|
Environment.py
|
zhangks93/ASPS
|
02f1f8cd563cdccea01dedc950fa38c570dc996e
|
[
"MIT"
] | 2
|
2021-03-26T00:17:44.000Z
|
2021-08-17T12:23:56.000Z
|
Environment.py
|
zhangks93/ASPS
|
02f1f8cd563cdccea01dedc950fa38c570dc996e
|
[
"MIT"
] | null | null | null |
Environment.py
|
zhangks93/ASPS
|
02f1f8cd563cdccea01dedc950fa38c570dc996e
|
[
"MIT"
] | null | null | null |
import influent as If
import reactor as Ra
import clarifier as Ca
import constant
import pandas as pd
import numpy as np
class WWTP():
    """BSM1-style activated-sludge wastewater treatment plant.

    Five bioreactors (A..E) in series fed by an influent stream, with an
    internal recycle from reactor E back to reactor A.  Each simulated
    time step logs every reactor's state into a per-reactor pandas
    DataFrame before integrating the biodegradation kinetics.
    """

    # Column layout of the per-reactor logs: 13 ASM1 state variables + TSS,
    # the oxygen transfer coefficient KLa, 14 kinetic parameters, and the
    # two outgoing flows.  (Factored out of __init__ — it was repeated
    # verbatim five times.)
    _LOG_COLUMNS = ['Si', 'Ss', 'Xi', 'Xs', 'Xbh', 'Xba', 'Xp', 'So', 'Sno',
                    'Snh', 'Snd', 'Xnd', 'Salk', 'TSS', 'KLa', 'Ks', 'μH',
                    'KNO', 'KOH', 'bH', 'ηg', 'ηh', 'kh', 'Kx', 'μA', 'KNH',
                    'ka', 'KOA', 'bA', 'out_flow_main', 'out_flow_side']

    def __init__(self):
        # Influent driven by the BSM1 long-term data file.
        self.Influent = If.influent('influent', 0, 'data/bsm1LT.xlsx')
        # Two 1000 m^3 tanks followed by three 1333 m^3 tanks (BSM1 layout).
        self.A = Ra.bioreactor('Reactor1', 0, 1000)
        self.B = Ra.bioreactor('Reactor2', 0, 1000)
        self.C = Ra.bioreactor('Reactor3', 0, 1333)
        self.D = Ra.bioreactor('Reactor4', 0, 1333)
        self.E = Ra.bioreactor('Reactor5', 0, 1333)
        # Initial component concentrations per reactor (steady-state values).
        self.A.set_comps([30,2.81,1149,82.1,2552,148,449,0.0043,5.37,7.92,1.22,5.28,4.93,3285])
        self.B.set_comps([30,1.46,1149,76.4,2553,148,450,0.0000631,3.66,8.34,0.882,5.03,5.08,3282])
        self.C.set_comps([30,1.15,1149,64.9,2557,149,450,1.72,6.54,5.55,0.829,4.39,4.67,3278])
        self.D.set_comps([30,0.995,1149,55.7,2559,150,451,2.43,9.3,2.97,0.767,3.88,4.29,3274])
        self.E.set_comps([30,0.889,1149,49.3,2559,150,452,0.491,10.4,1.73,0.688,3.53,4.13,3270])
        # One log per reactor; one row appended per simulated time step.
        self.log_A = pd.DataFrame(columns=self._LOG_COLUMNS)
        self.log_B = pd.DataFrame(columns=self._LOG_COLUMNS)
        self.log_C = pd.DataFrame(columns=self._LOG_COLUMNS)
        self.log_D = pd.DataFrame(columns=self._LOG_COLUMNS)
        self.log_E = pd.DataFrame(columns=self._LOG_COLUMNS)

    def pipe_connect(self):
        """Wire the reactors in series and close the internal recycle loop."""
        self.A.add_upstream(self.Influent, 'Main')
        self.B.add_upstream(self.A, 'Main')
        self.C.add_upstream(self.B, 'Main')
        self.D.add_upstream(self.C, 'Main')
        self.E.add_upstream(self.D, 'Main')
        # Internal recycle: side stream from E back into A.
        self.E.set_outflow_side(10)
        self.A.add_upstream(self.E, 'Side')

    def _log_state(self, log, reactor):
        """Append the reactor's current state as one row of `log`.

        NOTE(review): these integer slices with .loc on string-labelled
        columns rely on the positional fallback of the pandas version in
        use — verify against the installed pandas.
        """
        log.loc[reactor.time, 0:14] = reactor.get_comps()
        log.loc[reactor.time, 14:15] = reactor.KLa
        log.loc[reactor.time, 15:29] = reactor.kin_paras
        log.loc[reactor.time, 29:30] = reactor.outflow_main
        log.loc[reactor.time, 30:31] = reactor.outflow_side

    def run(self):
        """Advance the plant by one time step.

        Refreshes the influent, propagates flows downstream, logs each
        reactor's pre-reaction state, integrates the biodegradation
        kinetics, and increments the influent clock.
        """
        # Refresh influent and propagate flows A -> E.
        self.Influent.update_inflow()
        self.Influent.update_comps()
        self.Influent.update_outflow_main()
        self.A.update_inflow(self.Influent)
        self.A.update_outflow_main()
        self.B.update_inflow(self.A)
        self.B.update_outflow_main()
        self.C.update_inflow(self.B)
        self.C.update_outflow_main()
        self.D.update_inflow(self.C)
        self.D.update_outflow_main()
        self.E.update_inflow(self.D)
        self.E.update_outflow_main()
        # Log, then react.  Reactor A additionally receives the internal
        # recycle from reactor E; every other reactor only sees its
        # immediate upstream neighbour.
        self._log_state(self.log_A, self.A)
        self.A.biodegrade(self.Influent.temp, self.Influent, self.E)
        self._log_state(self.log_B, self.B)
        self.B.biodegrade(self.Influent.temp, self.A)
        self._log_state(self.log_C, self.C)
        self.C.biodegrade(self.Influent.temp, self.B)
        self._log_state(self.log_D, self.D)
        self.D.biodegrade(self.Influent.temp, self.C)
        self._log_state(self.log_E, self.E)
        self.E.biodegrade(self.Influent.temp, self.D)
        self.Influent.time = self.Influent.time + 1

    def control(self, op_para):
        """Apply operational set-points to reactor E.

        op_para: [KLa, outflow_side]

        BUG FIX: the original definition omitted `self` from the
        signature yet referenced it in the body, so any call raised
        NameError.  It is now a proper instance method.
        """
        self.E.set_KLa(op_para[0])
        self.E.set_outflow_side(op_para[1])
# Build the plant, wire the flowsheet, and simulate 700 time steps.
A=WWTP()
A.pipe_connect()
for t in range (700):
    A.run()
# Dump the per-reactor logs to one workbook, one sheet per reactor.
# NOTE(review): ExcelWriter.save() is deprecated in newer pandas (use
# close() / a context manager) — verify against the installed version.
writer = pd.ExcelWriter('log.xlsx')
A.log_A.to_excel(writer,'log_A')
A.log_B.to_excel(writer,'log_B')
A.log_C.to_excel(writer,'log_C')
A.log_D.to_excel(writer,'log_D')
A.log_E.to_excel(writer,'log_E')
writer.save()
| 55.845361
| 234
| 0.602363
|
b3b78fe7db9285dc545bf40341c632ed871c1545
| 565
|
py
|
Python
|
makegbs.py
|
DevEd2/HokeyPokey-GB
|
b7d08b8c3eadc5f19519bfb2961cda7eacf11c6f
|
[
"MIT"
] | null | null | null |
makegbs.py
|
DevEd2/HokeyPokey-GB
|
b7d08b8c3eadc5f19519bfb2961cda7eacf11c6f
|
[
"MIT"
] | null | null | null |
makegbs.py
|
DevEd2/HokeyPokey-GB
|
b7d08b8c3eadc5f19519bfb2961cda7eacf11c6f
|
[
"MIT"
] | null | null | null |
# makegbs.py - create a GBS file from GBSHeader.bin and HokeyPokey.gbc
#
# A GBS file is the 0x70-byte GBS header followed by the player/song
# data, which lives at ROM offset 0x4000 (bank 1) in the demo ROM.
#
# IMPROVEMENT: the original opened the three files manually and only
# closed them at the end, leaking handles on any intermediate error;
# `with` guarantees they are closed.  (The old header comment also
# named the wrong input file, "DevSound.gb".)
with open("GBSHeader.bin", "rb") as hdr_file, \
     open("HokeyPokey.gbc", "rb") as rom_file, \
     open("HokeyPokey.gbs", "wb") as out_file:
    # copy header
    out_file.write(hdr_file.read(0x70))  # write GBS header
    # copy DevSound + song data
    rom_file.seek(0x4000)                # relevant data starts at offset 0x4000
    out_file.write(rom_file.read(0x4000))  # write song data
| 29.736842
| 80
| 0.656637
|
dd00e200c627f7efdffc0413b9f09a77924f50b4
| 81,841
|
py
|
Python
|
src/transformers/models/bert/modeling_bert.py
|
bhavika/transformers
|
65cf33e7e53cd46313f3655f274b3f6ca0fd679d
|
[
"Apache-2.0"
] | 1
|
2022-03-16T13:02:15.000Z
|
2022-03-16T13:02:15.000Z
|
src/transformers/models/bert/modeling_bert.py
|
bhavika/transformers
|
65cf33e7e53cd46313f3655f274b3f6ca0fd679d
|
[
"Apache-2.0"
] | 2
|
2022-03-14T10:13:16.000Z
|
2022-03-14T11:50:27.000Z
|
src/transformers/models/bert/modeling_bert.py
|
bhavika/transformers
|
65cf33e7e53cd46313f3655f274b3f6ca0fd679d
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""PyTorch BERT model."""
import math
import os
import warnings
from dataclasses import dataclass
from typing import List, Optional, Tuple, Union
import torch
import torch.utils.checkpoint
from packaging import version
from torch import nn
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
from ...activations import ACT2FN
from ...file_utils import (
ModelOutput,
add_code_sample_docstrings,
add_start_docstrings,
add_start_docstrings_to_model_forward,
replace_return_docstrings,
)
from ...modeling_outputs import (
BaseModelOutputWithPastAndCrossAttentions,
BaseModelOutputWithPoolingAndCrossAttentions,
CausalLMOutputWithCrossAttentions,
MaskedLMOutput,
MultipleChoiceModelOutput,
NextSentencePredictorOutput,
QuestionAnsweringModelOutput,
SequenceClassifierOutput,
TokenClassifierOutput,
)
from ...modeling_utils import (
PreTrainedModel,
apply_chunking_to_forward,
find_pruneable_heads_and_indices,
prune_linear_layer,
)
from ...utils import logging
from .configuration_bert import BertConfig
logger = logging.get_logger(__name__)

# Defaults consumed by the shared docstring decorators on the model classes.
_CHECKPOINT_FOR_DOC = "bert-base-uncased"
_CONFIG_FOR_DOC = "BertConfig"
_TOKENIZER_FOR_DOC = "BertTokenizer"

# Canonical pretrained BERT checkpoints hosted on the Hugging Face Hub.
BERT_PRETRAINED_MODEL_ARCHIVE_LIST = [
    "bert-base-uncased",
    "bert-large-uncased",
    "bert-base-cased",
    "bert-large-cased",
    "bert-base-multilingual-uncased",
    "bert-base-multilingual-cased",
    "bert-base-chinese",
    "bert-base-german-cased",
    "bert-large-uncased-whole-word-masking",
    "bert-large-cased-whole-word-masking",
    "bert-large-uncased-whole-word-masking-finetuned-squad",
    "bert-large-cased-whole-word-masking-finetuned-squad",
    "bert-base-cased-finetuned-mrpc",
    "bert-base-german-dbmdz-cased",
    "bert-base-german-dbmdz-uncased",
    "cl-tohoku/bert-base-japanese",
    "cl-tohoku/bert-base-japanese-whole-word-masking",
    "cl-tohoku/bert-base-japanese-char",
    "cl-tohoku/bert-base-japanese-char-whole-word-masking",
    "TurkuNLP/bert-base-finnish-cased-v1",
    "TurkuNLP/bert-base-finnish-uncased-v1",
    "wietsedv/bert-base-dutch-cased",
    # See all BERT models at https://huggingface.co/models?filter=bert
]
def load_tf_weights_in_bert(model, config, tf_checkpoint_path):
    """Load tf checkpoints in a pytorch model.

    Walks every variable in the TF checkpoint, maps its slash-separated
    scope path onto the corresponding attribute path of ``model``, and
    copies the weights in place (transposing ``kernel`` matrices to match
    PyTorch's Linear layout).

    Args:
        model: the PyTorch BERT model to populate (mutated in place).
        config: unused in the body; kept for interface symmetry.
        tf_checkpoint_path: path to the TensorFlow checkpoint.

    Returns:
        The same ``model``, with weights loaded.
    """
    # TF and numpy are imported lazily so the module works without them.
    try:
        import re

        import numpy as np
        import tensorflow as tf
    except ImportError:
        logger.error(
            "Loading a TensorFlow model in PyTorch, requires TensorFlow to be installed. Please see "
            "https://www.tensorflow.org/install/ for installation instructions."
        )
        raise
    tf_path = os.path.abspath(tf_checkpoint_path)
    logger.info(f"Converting TensorFlow checkpoint from {tf_path}")
    # Load weights from TF model
    init_vars = tf.train.list_variables(tf_path)
    names = []
    arrays = []
    for name, shape in init_vars:
        logger.info(f"Loading TF weight {name} with shape {shape}")
        array = tf.train.load_variable(tf_path, name)
        names.append(name)
        arrays.append(array)

    for name, array in zip(names, arrays):
        name = name.split("/")
        # adam_v and adam_m are variables used in AdamWeightDecayOptimizer to calculated m and v
        # which are not required for using pretrained model
        if any(
            n in ["adam_v", "adam_m", "AdamWeightDecayOptimizer", "AdamWeightDecayOptimizer_1", "global_step"]
            for n in name
        ):
            logger.info(f"Skipping {'/'.join(name)}")
            continue
        # Descend into `model` one scope segment at a time, translating TF
        # scope names to PyTorch attribute names.
        pointer = model
        for m_name in name:
            # Scopes like "layer_3" split into ("layer", 3) for indexing.
            if re.fullmatch(r"[A-Za-z]+_\d+", m_name):
                scope_names = re.split(r"_(\d+)", m_name)
            else:
                scope_names = [m_name]
            if scope_names[0] == "kernel" or scope_names[0] == "gamma":
                pointer = getattr(pointer, "weight")
            elif scope_names[0] == "output_bias" or scope_names[0] == "beta":
                pointer = getattr(pointer, "bias")
            elif scope_names[0] == "output_weights":
                pointer = getattr(pointer, "weight")
            elif scope_names[0] == "squad":
                pointer = getattr(pointer, "classifier")
            else:
                try:
                    pointer = getattr(pointer, scope_names[0])
                except AttributeError:
                    # NOTE(review): this `continue` only skips the current
                    # path segment (inner loop), not the whole variable —
                    # presumably the intent was to skip the variable; confirm
                    # against upstream behavior.
                    logger.info(f"Skipping {'/'.join(name)}")
                    continue
            if len(scope_names) >= 2:
                num = int(scope_names[1])
                pointer = pointer[num]
        if m_name[-11:] == "_embeddings":
            # Embedding scopes map to the module's weight tensor.
            pointer = getattr(pointer, "weight")
        elif m_name == "kernel":
            # TF stores dense kernels transposed relative to torch Linear.
            array = np.transpose(array)
        try:
            if pointer.shape != array.shape:
                raise ValueError(f"Pointer shape {pointer.shape} and array shape {array.shape} mismatched")
        except AssertionError as e:
            # NOTE(review): dead handler — the body raises ValueError, not
            # AssertionError (leftover from a former `assert`).
            e.args += (pointer.shape, array.shape)
            raise
        logger.info(f"Initialize PyTorch weight {name}")
        pointer.data = torch.from_numpy(array)
    return model
class BertEmbeddings(nn.Module):
    """Construct the embeddings from word, position and token_type embeddings."""

    def __init__(self, config):
        super().__init__()
        # Submodule creation order is kept stable for reproducible init.
        self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=config.pad_token_id)
        self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size)
        self.token_type_embeddings = nn.Embedding(config.type_vocab_size, config.hidden_size)

        # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load
        # any TensorFlow checkpoint file
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

        self.position_embedding_type = getattr(config, "position_embedding_type", "absolute")
        # position_ids (1, len position emb) is contiguous in memory and exported when serialized
        self.register_buffer("position_ids", torch.arange(config.max_position_embeddings).expand((1, -1)))
        if version.parse(torch.__version__) > version.parse("1.6.0"):
            # All-zeros, non-persistent buffer so traced models work without
            # an explicit token_type_ids input (issue #5664).
            self.register_buffer(
                "token_type_ids",
                torch.zeros(self.position_ids.size(), dtype=torch.long),
                persistent=False,
            )

    def forward(
        self, input_ids=None, token_type_ids=None, position_ids=None, inputs_embeds=None, past_key_values_length=0
    ):
        """Sum word, token-type and (optionally) absolute position embeddings,
        then apply LayerNorm and dropout."""
        # Infer (batch, seq) from whichever of input_ids / inputs_embeds was supplied.
        if input_ids is not None:
            shape = input_ids.size()
        else:
            shape = inputs_embeds.size()[:-1]
        seq_len = shape[1]

        if position_ids is None:
            # Offset by the cached key/value length so incremental decoding
            # continues at the right position.
            position_ids = self.position_ids[:, past_key_values_length : seq_len + past_key_values_length]

        if token_type_ids is None:
            # Prefer the registered zero buffer when present (issue #5664).
            if hasattr(self, "token_type_ids"):
                token_type_ids = self.token_type_ids[:, :seq_len].expand(shape[0], seq_len)
            else:
                token_type_ids = torch.zeros(shape, dtype=torch.long, device=self.position_ids.device)

        if inputs_embeds is None:
            inputs_embeds = self.word_embeddings(input_ids)

        embeddings = inputs_embeds + self.token_type_embeddings(token_type_ids)
        if self.position_embedding_type == "absolute":
            embeddings = embeddings + self.position_embeddings(position_ids)
        return self.dropout(self.LayerNorm(embeddings))
class BertSelfAttention(nn.Module):
    """Multi-head scaled dot-product attention (self- or cross-attention).

    Supports absolute position embeddings (added upstream in
    BertEmbeddings), "relative_key"/"relative_key_query" relative
    position scoring, and key/value caching for decoder use.
    """

    def __init__(self, config, position_embedding_type=None):
        super().__init__()
        # hidden_size must split evenly across heads unless the config
        # declares a separate embedding_size.
        if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"):
            raise ValueError(
                f"The hidden size ({config.hidden_size}) is not a multiple of the number of attention "
                f"heads ({config.num_attention_heads})"
            )

        self.num_attention_heads = config.num_attention_heads
        self.attention_head_size = int(config.hidden_size / config.num_attention_heads)
        self.all_head_size = self.num_attention_heads * self.attention_head_size

        self.query = nn.Linear(config.hidden_size, self.all_head_size)
        self.key = nn.Linear(config.hidden_size, self.all_head_size)
        self.value = nn.Linear(config.hidden_size, self.all_head_size)

        self.dropout = nn.Dropout(config.attention_probs_dropout_prob)
        self.position_embedding_type = position_embedding_type or getattr(
            config, "position_embedding_type", "absolute"
        )
        if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query":
            # One learned embedding per possible (query - key) distance,
            # covering -(max-1) .. +(max-1).
            self.max_position_embeddings = config.max_position_embeddings
            self.distance_embedding = nn.Embedding(2 * config.max_position_embeddings - 1, self.attention_head_size)

        self.is_decoder = config.is_decoder

    def transpose_for_scores(self, x):
        # (batch, seq, all_head_size) -> (batch, heads, seq, head_size)
        new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size)
        x = x.view(new_x_shape)
        return x.permute(0, 2, 1, 3)

    def forward(
        self,
        hidden_states,
        attention_mask=None,
        head_mask=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        past_key_value=None,
        output_attentions=False,
    ):
        mixed_query_layer = self.query(hidden_states)

        # If this is instantiated as a cross-attention module, the keys
        # and values come from an encoder; the attention mask needs to be
        # such that the encoder's padding tokens are not attended to.
        is_cross_attention = encoder_hidden_states is not None

        if is_cross_attention and past_key_value is not None:
            # reuse k,v, cross_attentions
            key_layer = past_key_value[0]
            value_layer = past_key_value[1]
            attention_mask = encoder_attention_mask
        elif is_cross_attention:
            key_layer = self.transpose_for_scores(self.key(encoder_hidden_states))
            value_layer = self.transpose_for_scores(self.value(encoder_hidden_states))
            attention_mask = encoder_attention_mask
        elif past_key_value is not None:
            # Uni-directional self-attention: extend the cache with this
            # step's projected key/value along the sequence axis (dim=2).
            key_layer = self.transpose_for_scores(self.key(hidden_states))
            value_layer = self.transpose_for_scores(self.value(hidden_states))
            key_layer = torch.cat([past_key_value[0], key_layer], dim=2)
            value_layer = torch.cat([past_key_value[1], value_layer], dim=2)
        else:
            key_layer = self.transpose_for_scores(self.key(hidden_states))
            value_layer = self.transpose_for_scores(self.value(hidden_states))

        query_layer = self.transpose_for_scores(mixed_query_layer)

        if self.is_decoder:
            # if cross_attention save Tuple(torch.Tensor, torch.Tensor) of all cross attention key/value_states.
            # Further calls to cross_attention layer can then reuse all cross-attention
            # key/value_states (first "if" case)
            # if uni-directional self-attention (decoder) save Tuple(torch.Tensor, torch.Tensor) of
            # all previous decoder key/value_states. Further calls to uni-directional self-attention
            # can concat previous decoder key/value_states to current projected key/value_states (third "elif" case)
            # if encoder bi-directional self-attention `past_key_value` is always `None`
            past_key_value = (key_layer, value_layer)

        # Take the dot product between "query" and "key" to get the raw attention scores.
        attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))

        if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query":
            # Add learned relative-distance terms to the raw scores.
            seq_length = hidden_states.size()[1]
            position_ids_l = torch.arange(seq_length, dtype=torch.long, device=hidden_states.device).view(-1, 1)
            position_ids_r = torch.arange(seq_length, dtype=torch.long, device=hidden_states.device).view(1, -1)
            distance = position_ids_l - position_ids_r
            # Shift distances to non-negative indices into the embedding table.
            positional_embedding = self.distance_embedding(distance + self.max_position_embeddings - 1)
            positional_embedding = positional_embedding.to(dtype=query_layer.dtype)  # fp16 compatibility

            if self.position_embedding_type == "relative_key":
                relative_position_scores = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding)
                attention_scores = attention_scores + relative_position_scores
            elif self.position_embedding_type == "relative_key_query":
                relative_position_scores_query = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding)
                relative_position_scores_key = torch.einsum("bhrd,lrd->bhlr", key_layer, positional_embedding)
                attention_scores = attention_scores + relative_position_scores_query + relative_position_scores_key

        # Scale by sqrt(d_k) as in the Transformer attention formula.
        attention_scores = attention_scores / math.sqrt(self.attention_head_size)
        if attention_mask is not None:
            # Apply the attention mask is (precomputed for all layers in BertModel forward() function)
            attention_scores = attention_scores + attention_mask

        # Normalize the attention scores to probabilities.
        attention_probs = nn.functional.softmax(attention_scores, dim=-1)

        # This is actually dropping out entire tokens to attend to, which might
        # seem a bit unusual, but is taken from the original Transformer paper.
        attention_probs = self.dropout(attention_probs)

        # Mask heads if we want to
        if head_mask is not None:
            attention_probs = attention_probs * head_mask

        context_layer = torch.matmul(attention_probs, value_layer)

        # (batch, heads, seq, head_size) -> (batch, seq, all_head_size)
        context_layer = context_layer.permute(0, 2, 1, 3).contiguous()
        new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,)
        context_layer = context_layer.view(new_context_layer_shape)

        outputs = (context_layer, attention_probs) if output_attentions else (context_layer,)

        if self.is_decoder:
            outputs = outputs + (past_key_value,)
        return outputs
class BertSelfOutput(nn.Module):
    """Post-attention projection: dense layer, dropout, then a residual
    LayerNorm connection with the attention sub-layer's input."""

    def __init__(self, config):
        super().__init__()
        width = config.hidden_size
        self.dense = nn.Linear(width, width)
        self.LayerNorm = nn.LayerNorm(width, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states, input_tensor):
        """Return ``LayerNorm(dropout(dense(hidden_states)) + input_tensor)``."""
        projected = self.dropout(self.dense(hidden_states))
        return self.LayerNorm(projected + input_tensor)
class BertAttention(nn.Module):
    """Complete attention sub-layer: self-attention followed by the output
    projection/residual block, with support for pruning attention heads."""

    def __init__(self, config, position_embedding_type=None):
        super().__init__()
        self.self = BertSelfAttention(config, position_embedding_type=position_embedding_type)
        self.output = BertSelfOutput(config)
        self.pruned_heads = set()

    def prune_heads(self, heads):
        """Remove the given attention heads from this layer (no-op for an empty list)."""
        if not heads:
            return
        heads, index = find_pruneable_heads_and_indices(
            heads, self.self.num_attention_heads, self.self.attention_head_size, self.pruned_heads
        )
        # Shrink the q/k/v projections and the output projection accordingly.
        for proj_name in ("query", "key", "value"):
            pruned = prune_linear_layer(getattr(self.self, proj_name), index)
            setattr(self.self, proj_name, pruned)
        self.output.dense = prune_linear_layer(self.output.dense, index, dim=1)
        # Keep bookkeeping attributes in sync with the pruned shapes.
        self.self.num_attention_heads = self.self.num_attention_heads - len(heads)
        self.self.all_head_size = self.self.attention_head_size * self.self.num_attention_heads
        self.pruned_heads = self.pruned_heads.union(heads)

    def forward(
        self,
        hidden_states,
        attention_mask=None,
        head_mask=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        past_key_value=None,
        output_attentions=False,
    ):
        """Run self-attention, then the residual output block.

        Returns ``(attention_output, *extras)`` where the extras are whatever
        the self-attention module returned beyond its first element
        (attention probabilities and/or the key/value cache).
        """
        self_outputs = self.self(
            hidden_states,
            attention_mask,
            head_mask,
            encoder_hidden_states,
            encoder_attention_mask,
            past_key_value,
            output_attentions,
        )
        attention_output = self.output(self_outputs[0], hidden_states)
        return (attention_output,) + self_outputs[1:]
class BertIntermediate(nn.Module):
    """Feed-forward expansion: ``hidden_size -> intermediate_size`` followed by
    the configured activation function."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.intermediate_size)
        # `hidden_act` is either a string key into ACT2FN or a callable.
        self.intermediate_act_fn = (
            ACT2FN[config.hidden_act] if isinstance(config.hidden_act, str) else config.hidden_act
        )

    def forward(self, hidden_states):
        """Project to the intermediate size and apply the activation."""
        return self.intermediate_act_fn(self.dense(hidden_states))
class BertOutput(nn.Module):
    """Feed-forward contraction: ``intermediate_size -> hidden_size``, then
    dropout and a residual LayerNorm connection."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.intermediate_size, config.hidden_size)
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states, input_tensor):
        """Return ``LayerNorm(dropout(dense(hidden_states)) + input_tensor)``."""
        projected = self.dropout(self.dense(hidden_states))
        return self.LayerNorm(projected + input_tensor)
class BertLayer(nn.Module):
    """One Transformer block: self-attention, optional cross-attention (only
    when configured as a decoder with `add_cross_attention`), and a chunked
    feed-forward sub-layer."""

    def __init__(self, config):
        super().__init__()
        # Chunk size for the feed-forward pass; 0 means no chunking.
        self.chunk_size_feed_forward = config.chunk_size_feed_forward
        # Sequence axis along which the feed-forward chunking is applied.
        self.seq_len_dim = 1
        self.attention = BertAttention(config)
        self.is_decoder = config.is_decoder
        self.add_cross_attention = config.add_cross_attention
        if self.add_cross_attention:
            if not self.is_decoder:
                raise ValueError(f"{self} should be used as a decoder model if cross attention is added")
            # Cross-attention is built with absolute position embeddings
            # regardless of the self-attention's position embedding type.
            self.crossattention = BertAttention(config, position_embedding_type="absolute")
        self.intermediate = BertIntermediate(config)
        self.output = BertOutput(config)

    def forward(
        self,
        hidden_states,
        attention_mask=None,
        head_mask=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        past_key_value=None,
        output_attentions=False,
    ):
        """Run the block.

        Returns `(layer_output, *attention_weights, *present_key_value)`:
        attention weights are present when `output_attentions` is set, and the
        key/value cache is appended last when running as a decoder.
        """
        # decoder uni-directional self-attention cached key/values tuple is at positions 1,2
        self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None
        self_attention_outputs = self.attention(
            hidden_states,
            attention_mask,
            head_mask,
            output_attentions=output_attentions,
            past_key_value=self_attn_past_key_value,
        )
        attention_output = self_attention_outputs[0]
        # if decoder, the last output is tuple of self-attn cache
        if self.is_decoder:
            outputs = self_attention_outputs[1:-1]
            present_key_value = self_attention_outputs[-1]
        else:
            outputs = self_attention_outputs[1:]  # add self attentions if we output attention weights
        cross_attn_present_key_value = None
        if self.is_decoder and encoder_hidden_states is not None:
            if not hasattr(self, "crossattention"):
                raise ValueError(
                    f"If `encoder_hidden_states` are passed, {self} has to be instantiated with cross-attention layers by setting `config.add_cross_attention=True`"
                )
            # cross_attn cached key/values tuple is at positions 3,4 of past_key_value tuple
            cross_attn_past_key_value = past_key_value[-2:] if past_key_value is not None else None
            cross_attention_outputs = self.crossattention(
                attention_output,
                attention_mask,
                head_mask,
                encoder_hidden_states,
                encoder_attention_mask,
                cross_attn_past_key_value,
                output_attentions,
            )
            attention_output = cross_attention_outputs[0]
            outputs = outputs + cross_attention_outputs[1:-1]  # add cross attentions if we output attention weights
            # add cross-attn cache to positions 3,4 of present_key_value tuple
            cross_attn_present_key_value = cross_attention_outputs[-1]
            present_key_value = present_key_value + cross_attn_present_key_value
        # Feed-forward is applied in chunks along the sequence dimension to
        # reduce peak memory (no-op when chunk_size_feed_forward == 0).
        layer_output = apply_chunking_to_forward(
            self.feed_forward_chunk, self.chunk_size_feed_forward, self.seq_len_dim, attention_output
        )
        outputs = (layer_output,) + outputs
        # if decoder, return the attn key/values as the last output
        if self.is_decoder:
            outputs = outputs + (present_key_value,)
        return outputs

    def feed_forward_chunk(self, attention_output):
        """Feed-forward sub-layer applied to one chunk of the sequence."""
        intermediate_output = self.intermediate(attention_output)
        layer_output = self.output(intermediate_output, attention_output)
        return layer_output
class BertEncoder(nn.Module):
    """Stack of `config.num_hidden_layers` `BertLayer` blocks."""

    def __init__(self, config):
        super().__init__()
        self.config = config
        self.layer = nn.ModuleList([BertLayer(config) for _ in range(config.num_hidden_layers)])
        # Toggled via BertPreTrainedModel._set_gradient_checkpointing.
        self.gradient_checkpointing = False

    def forward(
        self,
        hidden_states,
        attention_mask=None,
        head_mask=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        past_key_values=None,
        use_cache=None,
        output_attentions=False,
        output_hidden_states=False,
        return_dict=True,
    ):
        """Run all layers in sequence, optionally collecting per-layer hidden
        states, attention maps, and the decoder key/value cache."""
        all_hidden_states = () if output_hidden_states else None
        all_self_attentions = () if output_attentions else None
        all_cross_attentions = () if output_attentions and self.config.add_cross_attention else None
        next_decoder_cache = () if use_cache else None
        for i, layer_module in enumerate(self.layer):
            if output_hidden_states:
                # Record the input to this layer (the embeddings for i == 0).
                all_hidden_states = all_hidden_states + (hidden_states,)
            layer_head_mask = head_mask[i] if head_mask is not None else None
            past_key_value = past_key_values[i] if past_key_values is not None else None
            if self.gradient_checkpointing and self.training:
                if use_cache:
                    # The cache would be recomputed and discarded under
                    # checkpointing, so it is force-disabled.
                    logger.warning(
                        "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                    )
                    use_cache = False

                def create_custom_forward(module):
                    # Binds the non-tensor arguments; checkpoint() only
                    # forwards tensor inputs to the wrapped callable.
                    def custom_forward(*inputs):
                        return module(*inputs, past_key_value, output_attentions)

                    return custom_forward

                layer_outputs = torch.utils.checkpoint.checkpoint(
                    create_custom_forward(layer_module),
                    hidden_states,
                    attention_mask,
                    layer_head_mask,
                    encoder_hidden_states,
                    encoder_attention_mask,
                )
            else:
                layer_outputs = layer_module(
                    hidden_states,
                    attention_mask,
                    layer_head_mask,
                    encoder_hidden_states,
                    encoder_attention_mask,
                    past_key_value,
                    output_attentions,
                )
            hidden_states = layer_outputs[0]
            if use_cache:
                # Each layer appends its present key/value cache last.
                next_decoder_cache += (layer_outputs[-1],)
            if output_attentions:
                all_self_attentions = all_self_attentions + (layer_outputs[1],)
                if self.config.add_cross_attention:
                    all_cross_attentions = all_cross_attentions + (layer_outputs[2],)
        if output_hidden_states:
            # Also record the final hidden states after the last layer.
            all_hidden_states = all_hidden_states + (hidden_states,)
        if not return_dict:
            # Tuple output: keep only the entries that were actually requested.
            return tuple(
                v
                for v in [
                    hidden_states,
                    next_decoder_cache,
                    all_hidden_states,
                    all_self_attentions,
                    all_cross_attentions,
                ]
                if v is not None
            )
        return BaseModelOutputWithPastAndCrossAttentions(
            last_hidden_state=hidden_states,
            past_key_values=next_decoder_cache,
            hidden_states=all_hidden_states,
            attentions=all_self_attentions,
            cross_attentions=all_cross_attentions,
        )
class BertPooler(nn.Module):
    """Pools the sequence by passing the first token's hidden state through a
    dense layer with a tanh activation."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.activation = nn.Tanh()

    def forward(self, hidden_states):
        """Return ``tanh(dense(hidden_states[:, 0]))``."""
        # "Pooling" here is simply taking the first token's representation.
        first_token = hidden_states[:, 0]
        return self.activation(self.dense(first_token))
class BertPredictionHeadTransform(nn.Module):
    """Dense + activation + LayerNorm transform applied before the LM decoder."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        # `hidden_act` is either a string key into ACT2FN or a callable.
        self.transform_act_fn = (
            ACT2FN[config.hidden_act] if isinstance(config.hidden_act, str) else config.hidden_act
        )
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)

    def forward(self, hidden_states):
        """Return ``LayerNorm(act(dense(hidden_states)))``."""
        return self.LayerNorm(self.transform_act_fn(self.dense(hidden_states)))
class BertLMPredictionHead(nn.Module):
    """Language-modeling head: transform followed by a vocab-size projection.

    The decoder weight is typically tied to the input embeddings; a separate
    per-token bias is kept as its own parameter.
    """

    def __init__(self, config):
        super().__init__()
        self.transform = BertPredictionHeadTransform(config)
        self.decoder = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
        self.bias = nn.Parameter(torch.zeros(config.vocab_size))
        # Link the two attributes so the bias is correctly resized together
        # with `resize_token_embeddings`.
        self.decoder.bias = self.bias

    def forward(self, hidden_states):
        """Return per-token vocabulary logits for `hidden_states`."""
        return self.decoder(self.transform(hidden_states))
class BertOnlyMLMHead(nn.Module):
    """Wrapper exposing only the masked-LM prediction head."""

    def __init__(self, config):
        super().__init__()
        self.predictions = BertLMPredictionHead(config)

    def forward(self, sequence_output):
        """Return MLM logits for the encoder's sequence output."""
        return self.predictions(sequence_output)
class BertOnlyNSPHead(nn.Module):
    """Wrapper exposing only the next-sentence-prediction classifier."""

    def __init__(self, config):
        super().__init__()
        # Binary classification: "is sentence B the real continuation?"
        self.seq_relationship = nn.Linear(config.hidden_size, 2)

    def forward(self, pooled_output):
        """Return 2-way NSP logits for the pooled representation."""
        return self.seq_relationship(pooled_output)
class BertPreTrainingHeads(nn.Module):
    """Combined pre-training heads: masked-LM logits plus NSP classification."""

    def __init__(self, config):
        super().__init__()
        self.predictions = BertLMPredictionHead(config)
        self.seq_relationship = nn.Linear(config.hidden_size, 2)

    def forward(self, sequence_output, pooled_output):
        """Return ``(mlm_logits, nsp_logits)``."""
        return self.predictions(sequence_output), self.seq_relationship(pooled_output)
class BertPreTrainedModel(PreTrainedModel):
    """
    An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
    models.
    """

    config_class = BertConfig
    load_tf_weights = load_tf_weights_in_bert
    base_model_prefix = "bert"
    supports_gradient_checkpointing = True
    _keys_to_ignore_on_load_missing = [r"position_ids"]

    def _init_weights(self, module):
        """Initialize the weights of a single sub-module."""
        std = self.config.initializer_range
        if isinstance(module, nn.Linear):
            # Slightly different from the TF version which uses truncated_normal
            # for initialization; cf https://github.com/pytorch/pytorch/pull/5617
            module.weight.data.normal_(mean=0.0, std=std)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.Embedding):
            module.weight.data.normal_(mean=0.0, std=std)
            if module.padding_idx is not None:
                # Keep the padding token's embedding at exactly zero.
                module.weight.data[module.padding_idx].zero_()
        elif isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)

    def _set_gradient_checkpointing(self, module, value=False):
        """Toggle gradient checkpointing on `BertEncoder` sub-modules."""
        if isinstance(module, BertEncoder):
            module.gradient_checkpointing = value
@dataclass
class BertForPreTrainingOutput(ModelOutput):
    """
    Output type of [`BertForPreTraining`].
    Args:
        loss (*optional*, returned when `labels` is provided, `torch.FloatTensor` of shape `(1,)`):
            Total loss as the sum of the masked language modeling loss and the next sequence prediction
            (classification) loss.
        prediction_logits (`torch.FloatTensor` of shape `(batch_size, sequence_length, config.vocab_size)`):
            Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax).
        seq_relationship_logits (`torch.FloatTensor` of shape `(batch_size, 2)`):
            Prediction scores of the next sequence prediction (classification) head (scores of True/False continuation
            before SoftMax).
        hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`):
            Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of
            shape `(batch_size, sequence_length, hidden_size)`.
            Hidden-states of the model at the output of each layer plus the initial embedding outputs.
        attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`):
            Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length,
            sequence_length)`.
            Attentions weights after the attention softmax, used to compute the weighted average in the self-attention
            heads.
    """

    # `loss` is populated only when both `labels` and `next_sentence_label`
    # are passed to `BertForPreTraining.forward`.
    loss: Optional[torch.FloatTensor] = None
    prediction_logits: torch.FloatTensor = None
    seq_relationship_logits: torch.FloatTensor = None
    hidden_states: Optional[Tuple[torch.FloatTensor]] = None
    attentions: Optional[Tuple[torch.FloatTensor]] = None
BERT_START_DOCSTRING = r"""
This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the
library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads
etc.)
This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass.
Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage
and behavior.
Parameters:
config ([`BertConfig`]): Model configuration class with all the parameters of the model.
Initializing with a config file does not load the weights associated with the model, only the
configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights.
"""
BERT_INPUTS_DOCSTRING = r"""
Args:
input_ids (`torch.LongTensor` of shape `({0})`):
Indices of input sequence tokens in the vocabulary.
Indices can be obtained using [`BertTokenizer`]. See [`PreTrainedTokenizer.encode`] and
[`PreTrainedTokenizer.__call__`] for details.
[What are input IDs?](../glossary#input-ids)
attention_mask (`torch.FloatTensor` of shape `({0})`, *optional*):
Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
[What are attention masks?](../glossary#attention-mask)
token_type_ids (`torch.LongTensor` of shape `({0})`, *optional*):
Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0,
1]`:
- 0 corresponds to a *sentence A* token,
- 1 corresponds to a *sentence B* token.
[What are token type IDs?](../glossary#token-type-ids)
position_ids (`torch.LongTensor` of shape `({0})`, *optional*):
Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0,
config.max_position_embeddings - 1]`.
[What are position IDs?](../glossary#position-ids)
head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):
Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
inputs_embeds (`torch.FloatTensor` of shape `({0}, hidden_size)`, *optional*):
Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
is useful if you want more control over how to convert `input_ids` indices into associated vectors than the
model's internal embedding lookup matrix.
output_attentions (`bool`, *optional*):
Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
tensors for more detail.
output_hidden_states (`bool`, *optional*):
Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
more detail.
return_dict (`bool`, *optional*):
Whether or not to return a [`~file_utils.ModelOutput`] instead of a plain tuple.
"""
@add_start_docstrings(
    "The bare Bert Model transformer outputting raw hidden-states without any specific head on top.",
    BERT_START_DOCSTRING,
)
class BertModel(BertPreTrainedModel):
    """
    The model can behave as an encoder (with only self-attention) as well as a decoder, in which case a layer of
    cross-attention is added between the self-attention layers, following the architecture described in [Attention is
    all you need](https://arxiv.org/abs/1706.03762) by Ashish Vaswani, Noam Shazeer, Niki Parmar, Jakob Uszkoreit,
    Llion Jones, Aidan N. Gomez, Lukasz Kaiser and Illia Polosukhin.
    To behave as an decoder the model needs to be initialized with the `is_decoder` argument of the configuration set
    to `True`. To be used in a Seq2Seq model, the model needs to initialized with both `is_decoder` argument and
    `add_cross_attention` set to `True`; an `encoder_hidden_states` is then expected as an input to the forward pass.
    """

    def __init__(self, config, add_pooling_layer=True):
        super().__init__(config)
        self.config = config
        self.embeddings = BertEmbeddings(config)
        self.encoder = BertEncoder(config)
        # The pooler is optional so heads that do not use it avoid the extra parameters.
        self.pooler = BertPooler(config) if add_pooling_layer else None
        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self):
        """Return the word-embedding table."""
        return self.embeddings.word_embeddings

    def set_input_embeddings(self, value):
        """Replace the word-embedding table."""
        self.embeddings.word_embeddings = value

    def _prune_heads(self, heads_to_prune):
        """
        Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base
        class PreTrainedModel
        """
        for layer, heads in heads_to_prune.items():
            self.encoder.layer[layer].attention.prune_heads(heads)

    @add_start_docstrings_to_model_forward(BERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        processor_class=_TOKENIZER_FOR_DOC,
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=BaseModelOutputWithPoolingAndCrossAttentions,
        config_class=_CONFIG_FOR_DOC,
    )
    def forward(
        self,
        input_ids: Optional[torch.Tensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        token_type_ids: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        inputs_embeds: Optional[torch.Tensor] = None,
        encoder_hidden_states: Optional[torch.Tensor] = None,
        encoder_attention_mask: Optional[torch.Tensor] = None,
        past_key_values: Optional[List[torch.FloatTensor]] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, BaseModelOutputWithPoolingAndCrossAttentions]:
        r"""
        encoder_hidden_states  (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
            Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if
            the model is configured as a decoder.
        encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in
            the cross-attention if the model is configured as a decoder. Mask values selected in `[0, 1]`:
            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.
        past_key_values (`tuple(tuple(torch.FloatTensor))` of length `config.n_layers` with each tuple having 4 tensors of shape `(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
            Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding.
            If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that
            don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all
            `decoder_input_ids` of shape `(batch_size, sequence_length)`.
        use_cache (`bool`, *optional*):
            If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see
            `past_key_values`).
        """
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        # Key/value caching only applies to decoder-style usage.
        if self.config.is_decoder:
            use_cache = use_cache if use_cache is not None else self.config.use_cache
        else:
            use_cache = False
        if input_ids is not None and inputs_embeds is not None:
            raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
        elif input_ids is not None:
            input_shape = input_ids.size()
        elif inputs_embeds is not None:
            input_shape = inputs_embeds.size()[:-1]
        else:
            raise ValueError("You have to specify either input_ids or inputs_embeds")
        batch_size, seq_length = input_shape
        device = input_ids.device if input_ids is not None else inputs_embeds.device
        # past_key_values_length
        past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0
        if attention_mask is None:
            # Default: attend to everything, including the cached prefix.
            attention_mask = torch.ones(((batch_size, seq_length + past_key_values_length)), device=device)
        if token_type_ids is None:
            # Reuse the buffered token_type_ids from the embeddings module when
            # available, otherwise fall back to zeros.
            if hasattr(self.embeddings, "token_type_ids"):
                buffered_token_type_ids = self.embeddings.token_type_ids[:, :seq_length]
                buffered_token_type_ids_expanded = buffered_token_type_ids.expand(batch_size, seq_length)
                token_type_ids = buffered_token_type_ids_expanded
            else:
                token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=device)
        # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length]
        # ourselves in which case we just need to make it broadcastable to all heads.
        extended_attention_mask: torch.Tensor = self.get_extended_attention_mask(attention_mask, input_shape, device)
        # If a 2D or 3D attention mask is provided for the cross-attention
        # we need to make broadcastable to [batch_size, num_heads, seq_length, seq_length]
        if self.config.is_decoder and encoder_hidden_states is not None:
            encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states.size()
            encoder_hidden_shape = (encoder_batch_size, encoder_sequence_length)
            if encoder_attention_mask is None:
                encoder_attention_mask = torch.ones(encoder_hidden_shape, device=device)
            encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask)
        else:
            encoder_extended_attention_mask = None
        # Prepare head mask if needed
        # 1.0 in head_mask indicate we keep the head
        # attention_probs has shape bsz x n_heads x N x N
        # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads]
        # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length]
        head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers)
        embedding_output = self.embeddings(
            input_ids=input_ids,
            position_ids=position_ids,
            token_type_ids=token_type_ids,
            inputs_embeds=inputs_embeds,
            past_key_values_length=past_key_values_length,
        )
        encoder_outputs = self.encoder(
            embedding_output,
            attention_mask=extended_attention_mask,
            head_mask=head_mask,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=encoder_extended_attention_mask,
            past_key_values=past_key_values,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        sequence_output = encoder_outputs[0]
        pooled_output = self.pooler(sequence_output) if self.pooler is not None else None
        if not return_dict:
            return (sequence_output, pooled_output) + encoder_outputs[1:]
        return BaseModelOutputWithPoolingAndCrossAttentions(
            last_hidden_state=sequence_output,
            pooler_output=pooled_output,
            past_key_values=encoder_outputs.past_key_values,
            hidden_states=encoder_outputs.hidden_states,
            attentions=encoder_outputs.attentions,
            cross_attentions=encoder_outputs.cross_attentions,
        )
@add_start_docstrings(
    """
    Bert Model with two heads on top as done during the pretraining: a `masked language modeling` head and a `next
    sentence prediction (classification)` head.
    """,
    BERT_START_DOCSTRING,
)
class BertForPreTraining(BertPreTrainedModel):
    """BERT with both pre-training heads: masked LM and next-sentence prediction."""

    def __init__(self, config):
        super().__init__(config)
        self.bert = BertModel(config)
        self.cls = BertPreTrainingHeads(config)
        # Initialize weights and apply final processing
        self.post_init()

    def get_output_embeddings(self):
        """Return the MLM decoder layer (tied to the input embeddings)."""
        return self.cls.predictions.decoder

    def set_output_embeddings(self, new_embeddings):
        """Replace the MLM decoder layer (used when resizing token embeddings)."""
        self.cls.predictions.decoder = new_embeddings

    @add_start_docstrings_to_model_forward(BERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @replace_return_docstrings(output_type=BertForPreTrainingOutput, config_class=_CONFIG_FOR_DOC)
    def forward(
        self,
        input_ids: Optional[torch.Tensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        token_type_ids: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        inputs_embeds: Optional[torch.Tensor] = None,
        labels: Optional[torch.Tensor] = None,
        next_sentence_label: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, BertForPreTrainingOutput]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should be in `[-100, 0, ...,
            config.vocab_size]` (see `input_ids` docstring) Tokens with indices set to `-100` are ignored (masked),
            the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`
        next_sentence_label (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the next sequence prediction (classification) loss. Input should be a sequence
            pair (see `input_ids` docstring) Indices should be in `[0, 1]`:
            - 0 indicates sequence B is a continuation of sequence A,
            - 1 indicates sequence B is a random sequence.
        kwargs (`Dict[str, any]`, optional, defaults to *{}*):
            Used to hide legacy arguments that have been deprecated.
        Returns:
        Example:
        ```python
        >>> from transformers import BertTokenizer, BertForPreTraining
        >>> import torch
        >>> tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
        >>> model = BertForPreTraining.from_pretrained("bert-base-uncased")
        >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
        >>> outputs = model(**inputs)
        >>> prediction_logits = outputs.prediction_logits
        >>> seq_relationship_logits = outputs.seq_relationship_logits
        ```
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        outputs = self.bert(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        sequence_output, pooled_output = outputs[:2]
        prediction_scores, seq_relationship_score = self.cls(sequence_output, pooled_output)
        total_loss = None
        # The loss is computed only when BOTH label sets are given; it is the
        # unweighted sum of the MLM and NSP cross-entropy losses.
        if labels is not None and next_sentence_label is not None:
            loss_fct = CrossEntropyLoss()
            masked_lm_loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), labels.view(-1))
            next_sentence_loss = loss_fct(seq_relationship_score.view(-1, 2), next_sentence_label.view(-1))
            total_loss = masked_lm_loss + next_sentence_loss
        if not return_dict:
            output = (prediction_scores, seq_relationship_score) + outputs[2:]
            return ((total_loss,) + output) if total_loss is not None else output
        return BertForPreTrainingOutput(
            loss=total_loss,
            prediction_logits=prediction_scores,
            seq_relationship_logits=seq_relationship_score,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
@add_start_docstrings(
"""Bert Model with a `language modeling` head on top for CLM fine-tuning.""", BERT_START_DOCSTRING
)
class BertLMHeadModel(BertPreTrainedModel):
_keys_to_ignore_on_load_unexpected = [r"pooler"]
_keys_to_ignore_on_load_missing = [r"position_ids", r"predictions.decoder.bias"]
    def __init__(self, config):
        """Build a decoder-style BERT with a language-modeling head and no pooler."""
        super().__init__(config)
        if not config.is_decoder:
            # Standalone causal LM use requires `is_decoder=True` in the config.
            logger.warning("If you want to use `BertLMHeadModel` as a standalone, add `is_decoder=True.`")
        self.bert = BertModel(config, add_pooling_layer=False)
        self.cls = BertOnlyMLMHead(config)
        # Initialize weights and apply final processing
        self.post_init()
    def get_output_embeddings(self):
        """Return the LM decoder layer (tied to the input embeddings)."""
        return self.cls.predictions.decoder
    def set_output_embeddings(self, new_embeddings):
        """Replace the LM decoder layer (used when resizing token embeddings)."""
        self.cls.predictions.decoder = new_embeddings
@add_start_docstrings_to_model_forward(BERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
@replace_return_docstrings(output_type=CausalLMOutputWithCrossAttentions, config_class=_CONFIG_FOR_DOC)
def forward(
self,
input_ids: Optional[torch.Tensor] = None,
attention_mask: Optional[torch.Tensor] = None,
token_type_ids: Optional[torch.Tensor] = None,
position_ids: Optional[torch.Tensor] = None,
head_mask: Optional[torch.Tensor] = None,
inputs_embeds: Optional[torch.Tensor] = None,
encoder_hidden_states: Optional[torch.Tensor] = None,
encoder_attention_mask: Optional[torch.Tensor] = None,
labels: Optional[torch.Tensor] = None,
past_key_values: Optional[List[torch.Tensor]] = None,
use_cache: Optional[bool] = None,
output_attentions: Optional[bool] = None,
output_hidden_states: Optional[bool] = None,
return_dict: Optional[bool] = None,
) -> Union[Tuple, CausalLMOutputWithCrossAttentions]:
r"""
encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention
if the model is configured as a decoder.
encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*):
Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used
in the cross-attention if the model is configured as a decoder. Mask values selected in `[0, 1]`:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
Labels for computing the left-to-right language modeling loss (next word prediction). Indices should be
in `[-100, 0, ..., config.vocab_size]` (see `input_ids` docstring) Tokens with indices set to `-100`
are ignored (masked), the loss is only computed for the tokens with labels n `[0, ...,
config.vocab_size]`
past_key_values (`tuple(tuple(torch.FloatTensor))` of length `config.n_layers` with each tuple having 4 tensors of shape `(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up
decoding.
If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those
that don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of
all `decoder_input_ids` of shape `(batch_size, sequence_length)`.
use_cache (`bool`, *optional*):
If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding
(see `past_key_values`).
Returns:
Example:
```python
>>> from transformers import BertTokenizer, BertLMHeadModel, BertConfig
>>> import torch
>>> tokenizer = BertTokenizer.from_pretrained("bert-base-cased")
>>> config = BertConfig.from_pretrained("bert-base-cased")
>>> config.is_decoder = True
>>> model = BertLMHeadModel.from_pretrained("bert-base-cased", config=config)
>>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
>>> outputs = model(**inputs)
>>> prediction_logits = outputs.logits
```
"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if labels is not None:
use_cache = False
outputs = self.bert(
input_ids,
attention_mask=attention_mask,
token_type_ids=token_type_ids,
position_ids=position_ids,
head_mask=head_mask,
inputs_embeds=inputs_embeds,
encoder_hidden_states=encoder_hidden_states,
encoder_attention_mask=encoder_attention_mask,
past_key_values=past_key_values,
use_cache=use_cache,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
sequence_output = outputs[0]
prediction_scores = self.cls(sequence_output)
lm_loss = None
if labels is not None:
# we are doing next-token prediction; shift prediction scores and input ids by one
shifted_prediction_scores = prediction_scores[:, :-1, :].contiguous()
labels = labels[:, 1:].contiguous()
loss_fct = CrossEntropyLoss()
lm_loss = loss_fct(shifted_prediction_scores.view(-1, self.config.vocab_size), labels.view(-1))
if not return_dict:
output = (prediction_scores,) + outputs[2:]
return ((lm_loss,) + output) if lm_loss is not None else output
return CausalLMOutputWithCrossAttentions(
loss=lm_loss,
logits=prediction_scores,
past_key_values=outputs.past_key_values,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
cross_attentions=outputs.cross_attentions,
)
def prepare_inputs_for_generation(self, input_ids, past=None, attention_mask=None, **model_kwargs):
input_shape = input_ids.shape
# if model is used as a decoder in encoder-decoder model, the decoder attention mask is created on the fly
if attention_mask is None:
attention_mask = input_ids.new_ones(input_shape)
# cut decoder_input_ids if past is used
if past is not None:
input_ids = input_ids[:, -1:]
return {"input_ids": input_ids, "attention_mask": attention_mask, "past_key_values": past}
def _reorder_cache(self, past, beam_idx):
reordered_past = ()
for layer_past in past:
reordered_past += (tuple(past_state.index_select(0, beam_idx) for past_state in layer_past),)
return reordered_past
@add_start_docstrings("""Bert Model with a `language modeling` head on top.""", BERT_START_DOCSTRING)
class BertForMaskedLM(BertPreTrainedModel):
    # Pooler weights are unused (add_pooling_layer=False); decoder bias is re-tied
    # in post_init(), so both may be absent from checkpoints.
    _keys_to_ignore_on_load_unexpected = [r"pooler"]
    _keys_to_ignore_on_load_missing = [r"position_ids", r"predictions.decoder.bias"]
    def __init__(self, config):
        super().__init__(config)
        if config.is_decoder:
            logger.warning(
                "If you want to use `BertForMaskedLM` make sure `config.is_decoder=False` for "
                "bi-directional self-attention."
            )
        self.bert = BertModel(config, add_pooling_layer=False)
        self.cls = BertOnlyMLMHead(config)
        # Initialize weights and apply final processing
        self.post_init()
    def get_output_embeddings(self):
        # Output embedding is the MLM head's decoder matrix (tied with input embeddings).
        return self.cls.predictions.decoder
    def set_output_embeddings(self, new_embeddings):
        self.cls.predictions.decoder = new_embeddings
    @add_start_docstrings_to_model_forward(BERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        processor_class=_TOKENIZER_FOR_DOC,
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=MaskedLMOutput,
        config_class=_CONFIG_FOR_DOC,
    )
    def forward(
        self,
        input_ids: Optional[torch.Tensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        token_type_ids: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        inputs_embeds: Optional[torch.Tensor] = None,
        encoder_hidden_states: Optional[torch.Tensor] = None,
        encoder_attention_mask: Optional[torch.Tensor] = None,
        labels: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, MaskedLMOutput]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should be in `[-100, 0, ...,
            config.vocab_size]` (see `input_ids` docstring) Tokens with indices set to `-100` are ignored (masked), the
            loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        outputs = self.bert(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=encoder_attention_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        sequence_output = outputs[0]
        prediction_scores = self.cls(sequence_output)
        masked_lm_loss = None
        if labels is not None:
            loss_fct = CrossEntropyLoss()  # -100 index = padding token
            masked_lm_loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), labels.view(-1))
        if not return_dict:
            output = (prediction_scores,) + outputs[2:]
            return ((masked_lm_loss,) + output) if masked_lm_loss is not None else output
        return MaskedLMOutput(
            loss=masked_lm_loss,
            logits=prediction_scores,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
    def prepare_inputs_for_generation(self, input_ids, attention_mask=None, **model_kwargs):
        # Generation hook: MLM decoding appends one PAD "dummy" token per step so the
        # model has a masked position to fill.
        input_shape = input_ids.shape
        effective_batch_size = input_shape[0]
        # add a dummy token
        if self.config.pad_token_id is None:
            raise ValueError("The PAD token should be defined for generation")
        attention_mask = torch.cat([attention_mask, attention_mask.new_zeros((attention_mask.shape[0], 1))], dim=-1)
        dummy_token = torch.full(
            (effective_batch_size, 1), self.config.pad_token_id, dtype=torch.long, device=input_ids.device
        )
        input_ids = torch.cat([input_ids, dummy_token], dim=1)
        return {"input_ids": input_ids, "attention_mask": attention_mask}
@add_start_docstrings(
    """Bert Model with a `next sentence prediction (classification)` head on top.""",
    BERT_START_DOCSTRING,
)
class BertForNextSentencePrediction(BertPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        # NSP classifies the pooled [CLS] representation, so the pooler is kept.
        self.bert = BertModel(config)
        self.cls = BertOnlyNSPHead(config)
        # Initialize weights and apply final processing
        self.post_init()
    @add_start_docstrings_to_model_forward(BERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @replace_return_docstrings(output_type=NextSentencePredictorOutput, config_class=_CONFIG_FOR_DOC)
    def forward(
        self,
        input_ids: Optional[torch.Tensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        token_type_ids: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        inputs_embeds: Optional[torch.Tensor] = None,
        labels: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        **kwargs,
    ) -> Union[Tuple, NextSentencePredictorOutput]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the next sequence prediction (classification) loss. Input should be a sequence pair
            (see `input_ids` docstring). Indices should be in `[0, 1]`:
            - 0 indicates sequence B is a continuation of sequence A,
            - 1 indicates sequence B is a random sequence.
        Returns:
        Example:
        ```python
        >>> from transformers import BertTokenizer, BertForNextSentencePrediction
        >>> import torch
        >>> tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
        >>> model = BertForNextSentencePrediction.from_pretrained("bert-base-uncased")
        >>> prompt = "In Italy, pizza served in formal settings, such as at a restaurant, is presented unsliced."
        >>> next_sentence = "The sky is blue due to the shorter wavelength of blue light."
        >>> encoding = tokenizer(prompt, next_sentence, return_tensors="pt")
        >>> outputs = model(**encoding, labels=torch.LongTensor([1]))
        >>> logits = outputs.logits
        >>> assert logits[0, 0] < logits[0, 1]  # next sentence was random
        ```
        """
        # Backward compatibility for the pre-rename keyword argument.
        if "next_sentence_label" in kwargs:
            warnings.warn(
                "The `next_sentence_label` argument is deprecated and will be removed in a future version, use `labels` instead.",
                FutureWarning,
            )
            labels = kwargs.pop("next_sentence_label")
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        outputs = self.bert(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        # outputs[1] is the pooled [CLS] output.
        pooled_output = outputs[1]
        seq_relationship_scores = self.cls(pooled_output)
        next_sentence_loss = None
        if labels is not None:
            loss_fct = CrossEntropyLoss()
            next_sentence_loss = loss_fct(seq_relationship_scores.view(-1, 2), labels.view(-1))
        if not return_dict:
            output = (seq_relationship_scores,) + outputs[2:]
            return ((next_sentence_loss,) + output) if next_sentence_loss is not None else output
        return NextSentencePredictorOutput(
            loss=next_sentence_loss,
            logits=seq_relationship_scores,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
@add_start_docstrings(
    """
    Bert Model transformer with a sequence classification/regression head on top (a linear layer on top of the pooled
    output) e.g. for GLUE tasks.
    """,
    BERT_START_DOCSTRING,
)
class BertForSequenceClassification(BertPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels
        self.config = config
        self.bert = BertModel(config)
        # Fall back to the generic hidden dropout when no classifier-specific rate is set.
        classifier_dropout = (
            config.classifier_dropout if config.classifier_dropout is not None else config.hidden_dropout_prob
        )
        self.dropout = nn.Dropout(classifier_dropout)
        self.classifier = nn.Linear(config.hidden_size, config.num_labels)
        # Initialize weights and apply final processing
        self.post_init()
    @add_start_docstrings_to_model_forward(BERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        processor_class=_TOKENIZER_FOR_DOC,
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=SequenceClassifierOutput,
        config_class=_CONFIG_FOR_DOC,
    )
    def forward(
        self,
        input_ids: Optional[torch.Tensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        token_type_ids: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        inputs_embeds: Optional[torch.Tensor] = None,
        labels: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, SequenceClassifierOutput]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the sequence classification/regression loss. Indices should be in `[0, ...,
            config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If
            `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        outputs = self.bert(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        pooled_output = outputs[1]
        pooled_output = self.dropout(pooled_output)
        logits = self.classifier(pooled_output)
        loss = None
        if labels is not None:
            # Infer the problem type once (cached on config) from num_labels and
            # label dtype: 1 label -> regression; integer labels -> single-label
            # classification; otherwise multi-label.
            if self.config.problem_type is None:
                if self.num_labels == 1:
                    self.config.problem_type = "regression"
                elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int):
                    self.config.problem_type = "single_label_classification"
                else:
                    self.config.problem_type = "multi_label_classification"
            if self.config.problem_type == "regression":
                loss_fct = MSELoss()
                if self.num_labels == 1:
                    loss = loss_fct(logits.squeeze(), labels.squeeze())
                else:
                    loss = loss_fct(logits, labels)
            elif self.config.problem_type == "single_label_classification":
                loss_fct = CrossEntropyLoss()
                loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
            elif self.config.problem_type == "multi_label_classification":
                loss_fct = BCEWithLogitsLoss()
                loss = loss_fct(logits, labels)
        if not return_dict:
            output = (logits,) + outputs[2:]
            return ((loss,) + output) if loss is not None else output
        return SequenceClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
@add_start_docstrings(
    """
    Bert Model with a multiple choice classification head on top (a linear layer on top of the pooled output and a
    softmax) e.g. for RocStories/SWAG tasks.
    """,
    BERT_START_DOCSTRING,
)
class BertForMultipleChoice(BertPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        self.bert = BertModel(config)
        classifier_dropout = (
            config.classifier_dropout if config.classifier_dropout is not None else config.hidden_dropout_prob
        )
        self.dropout = nn.Dropout(classifier_dropout)
        # One scalar score per choice; scores are reshaped to (batch, num_choices) below.
        self.classifier = nn.Linear(config.hidden_size, 1)
        # Initialize weights and apply final processing
        self.post_init()
    @add_start_docstrings_to_model_forward(BERT_INPUTS_DOCSTRING.format("batch_size, num_choices, sequence_length"))
    @add_code_sample_docstrings(
        processor_class=_TOKENIZER_FOR_DOC,
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=MultipleChoiceModelOutput,
        config_class=_CONFIG_FOR_DOC,
    )
    def forward(
        self,
        input_ids: Optional[torch.Tensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        token_type_ids: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        inputs_embeds: Optional[torch.Tensor] = None,
        labels: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, MultipleChoiceModelOutput]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the multiple choice classification loss. Indices should be in `[0, ...,
            num_choices-1]` where `num_choices` is the size of the second dimension of the input tensors. (See
            `input_ids` above)
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        num_choices = input_ids.shape[1] if input_ids is not None else inputs_embeds.shape[1]
        # Flatten (batch, num_choices, seq) -> (batch * num_choices, seq) so every
        # choice is encoded as an independent sequence by BERT.
        input_ids = input_ids.view(-1, input_ids.size(-1)) if input_ids is not None else None
        attention_mask = attention_mask.view(-1, attention_mask.size(-1)) if attention_mask is not None else None
        token_type_ids = token_type_ids.view(-1, token_type_ids.size(-1)) if token_type_ids is not None else None
        position_ids = position_ids.view(-1, position_ids.size(-1)) if position_ids is not None else None
        inputs_embeds = (
            inputs_embeds.view(-1, inputs_embeds.size(-2), inputs_embeds.size(-1))
            if inputs_embeds is not None
            else None
        )
        outputs = self.bert(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        pooled_output = outputs[1]
        pooled_output = self.dropout(pooled_output)
        logits = self.classifier(pooled_output)
        # Un-flatten the per-choice scores back to (batch, num_choices).
        reshaped_logits = logits.view(-1, num_choices)
        loss = None
        if labels is not None:
            loss_fct = CrossEntropyLoss()
            loss = loss_fct(reshaped_logits, labels)
        if not return_dict:
            output = (reshaped_logits,) + outputs[2:]
            return ((loss,) + output) if loss is not None else output
        return MultipleChoiceModelOutput(
            loss=loss,
            logits=reshaped_logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
@add_start_docstrings(
    """
    Bert Model with a token classification head on top (a linear layer on top of the hidden-states output) e.g. for
    Named-Entity-Recognition (NER) tasks.
    """,
    BERT_START_DOCSTRING,
)
class BertForTokenClassification(BertPreTrainedModel):
    # The pooler is never used for per-token classification.
    _keys_to_ignore_on_load_unexpected = [r"pooler"]
    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels
        self.bert = BertModel(config, add_pooling_layer=False)
        classifier_dropout = (
            config.classifier_dropout if config.classifier_dropout is not None else config.hidden_dropout_prob
        )
        self.dropout = nn.Dropout(classifier_dropout)
        self.classifier = nn.Linear(config.hidden_size, config.num_labels)
        # Initialize weights and apply final processing
        self.post_init()
    @add_start_docstrings_to_model_forward(BERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        processor_class=_TOKENIZER_FOR_DOC,
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=TokenClassifierOutput,
        config_class=_CONFIG_FOR_DOC,
    )
    def forward(
        self,
        input_ids: Optional[torch.Tensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        token_type_ids: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        inputs_embeds: Optional[torch.Tensor] = None,
        labels: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, TokenClassifierOutput]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the token classification loss. Indices should be in `[0, ..., config.num_labels - 1]`.
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        outputs = self.bert(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        # Per-token hidden states (no pooling) -> dropout -> per-token logits.
        sequence_output = outputs[0]
        sequence_output = self.dropout(sequence_output)
        logits = self.classifier(sequence_output)
        loss = None
        if labels is not None:
            loss_fct = CrossEntropyLoss()
            loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
        if not return_dict:
            output = (logits,) + outputs[2:]
            return ((loss,) + output) if loss is not None else output
        return TokenClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
@add_start_docstrings(
    """
    Bert Model with a span classification head on top for extractive question-answering tasks like SQuAD (a linear
    layers on top of the hidden-states output to compute `span start logits` and `span end logits`).
    """,
    BERT_START_DOCSTRING,
)
class BertForQuestionAnswering(BertPreTrainedModel):
    # Span prediction works on per-token states, so the pooler is unused.
    _keys_to_ignore_on_load_unexpected = [r"pooler"]
    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels
        self.bert = BertModel(config, add_pooling_layer=False)
        # Projects each token's hidden state to two logits: span start and span end.
        self.qa_outputs = nn.Linear(config.hidden_size, config.num_labels)
        # Initialize weights and apply final processing
        self.post_init()
    @add_start_docstrings_to_model_forward(BERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        processor_class=_TOKENIZER_FOR_DOC,
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=QuestionAnsweringModelOutput,
        config_class=_CONFIG_FOR_DOC,
    )
    def forward(
        self,
        input_ids: Optional[torch.Tensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        token_type_ids: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        inputs_embeds: Optional[torch.Tensor] = None,
        start_positions: Optional[torch.Tensor] = None,
        end_positions: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[Tuple, QuestionAnsweringModelOutput]:
        r"""
        start_positions (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for position (index) of the start of the labelled span for computing the token classification loss.
            Positions are clamped to the length of the sequence (`sequence_length`). Position outside of the sequence
            are not taken into account for computing the loss.
        end_positions (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for position (index) of the end of the labelled span for computing the token classification loss.
            Positions are clamped to the length of the sequence (`sequence_length`). Position outside of the sequence
            are not taken into account for computing the loss.
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        outputs = self.bert(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        sequence_output = outputs[0]
        logits = self.qa_outputs(sequence_output)
        # Split the 2-channel logits into separate start/end tensors of shape (batch, seq).
        start_logits, end_logits = logits.split(1, dim=-1)
        start_logits = start_logits.squeeze(-1).contiguous()
        end_logits = end_logits.squeeze(-1).contiguous()
        total_loss = None
        if start_positions is not None and end_positions is not None:
            # If we are on multi-GPU, split add a dimension
            if len(start_positions.size()) > 1:
                start_positions = start_positions.squeeze(-1)
            if len(end_positions.size()) > 1:
                end_positions = end_positions.squeeze(-1)
            # sometimes the start/end positions are outside our model inputs, we ignore these terms
            ignored_index = start_logits.size(1)
            start_positions = start_positions.clamp(0, ignored_index)
            end_positions = end_positions.clamp(0, ignored_index)
            loss_fct = CrossEntropyLoss(ignore_index=ignored_index)
            start_loss = loss_fct(start_logits, start_positions)
            end_loss = loss_fct(end_logits, end_positions)
            # Final loss is the mean of the start- and end-position losses.
            total_loss = (start_loss + end_loss) / 2
        if not return_dict:
            output = (start_logits, end_logits) + outputs[2:]
            return ((total_loss,) + output) if total_loss is not None else output
        return QuestionAnsweringModelOutput(
            loss=total_loss,
            start_logits=start_logits,
            end_logits=end_logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
| 43.602025
| 202
| 0.665632
|
46220da617b5efd4483962670eb50a9016d06156
| 35,967
|
py
|
Python
|
Mask-vs-No-Mask-Detection/Model/utils/general.py
|
talkshrey/ML-Reserve
|
62fe72ba7e7513be52955aff7a6c12d5fe44d757
|
[
"MIT"
] | 12
|
2021-09-11T09:44:23.000Z
|
2022-03-12T09:16:53.000Z
|
Mask-vs-No-Mask-Detection/Model/utils/general.py
|
talkshrey/ML-Reserve
|
62fe72ba7e7513be52955aff7a6c12d5fe44d757
|
[
"MIT"
] | 54
|
2021-09-11T09:48:07.000Z
|
2022-01-31T05:38:12.000Z
|
Mask-vs-No-Mask-Detection/Model/utils/general.py
|
talkshrey/ML-Reserve
|
62fe72ba7e7513be52955aff7a6c12d5fe44d757
|
[
"MIT"
] | 39
|
2021-09-11T09:44:26.000Z
|
2022-03-12T09:16:55.000Z
|
# YOLOv5 🚀 by Ultralytics, GPL-3.0 license
"""
General utils
"""
import contextlib
import glob
import logging
import math
import os
import platform
import random
import re
import signal
import time
import urllib
from itertools import repeat
from multiprocessing.pool import ThreadPool
from pathlib import Path
from subprocess import check_output
from zipfile import ZipFile
import cv2
import numpy as np
import pandas as pd
import pkg_resources as pkg
import torch
import torchvision
import yaml
from utils.downloads import gsutil_getsize
from utils.metrics import box_iou, fitness
# Settings
# NOTE: these run as import-time side effects — importing this module changes
# global print/threading configuration for torch, numpy, pandas, OpenCV and NumExpr.
torch.set_printoptions(linewidth=320, precision=5, profile="long")
np.set_printoptions(
    linewidth=320, formatter={"float_kind": "{:11.5g}".format}
)  # format short g, %precision=5
pd.options.display.max_columns = 10
cv2.setNumThreads(
    0
)  # prevent OpenCV from multithreading (incompatible with PyTorch DataLoader)
os.environ["NUMEXPR_MAX_THREADS"] = str(min(os.cpu_count(), 8))  # NumExpr max threads
FILE = Path(__file__).resolve()
ROOT = FILE.parents[1]  # YOLOv5 root directory
class Profile(contextlib.ContextDecorator):
    # Usage: @Profile() decorator or 'with Profile():' context manager
    # Measures and prints the wall-clock time of the decorated call / `with` body.
    def __enter__(self):
        """Record the start time and return self (so `with Profile() as p:` binds the profiler)."""
        self.start = time.time()
        return self  # bug fix: previously returned None, making `as p` useless
    def __exit__(self, type, value, traceback):
        """Print the elapsed wall-clock time in seconds."""
        print(f"Profile results: {time.time() - self.start:.5f}s")
class Timeout(contextlib.ContextDecorator):
    """Abort the wrapped body with TimeoutError after `seconds`.

    Usage: @Timeout(seconds) decorator or 'with Timeout(seconds):' context manager.
    Relies on SIGALRM, so it is Unix/main-thread only. When
    suppress_timeout_errors is True (default) the TimeoutError is swallowed.
    """
    def __init__(self, seconds, *, timeout_msg="", suppress_timeout_errors=True):
        self.seconds = int(seconds)
        self.timeout_message = timeout_msg
        self.suppress = bool(suppress_timeout_errors)
    def _timeout_handler(self, signum, frame):
        # Called by the signal machinery when the alarm fires.
        raise TimeoutError(self.timeout_message)
    def __enter__(self):
        # Arm a one-shot alarm that raises via _timeout_handler after `seconds`.
        signal.signal(signal.SIGALRM, self._timeout_handler)
        signal.alarm(self.seconds)
    def __exit__(self, exc_type, exc_val, exc_tb):
        signal.alarm(0)  # disarm any pending alarm
        # Returning True from __exit__ suppresses the in-flight exception.
        return True if (self.suppress and exc_type is TimeoutError) else None
def try_except(func):
    """Decorator that runs `func`, printing (instead of raising) any exception.

    Improvements over the original: the wrapped function's return value is now
    propagated on success (it was always dropped), and functools.wraps preserves
    __name__/__doc__ for introspection. Existing callers that ignored the
    (previously always-None) return value are unaffected; failures still
    return None.
    """
    from functools import wraps  # local import keeps module-level deps unchanged
    @wraps(func)
    def handler(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            print(e)
    return handler
def methods(instance):
    """Return the names of an object's callable, non-dunder attributes."""
    def _is_public_method(name):
        return callable(getattr(instance, name)) and not name.startswith("__")
    return list(filter(_is_public_method, dir(instance)))
def set_logging(rank=-1, verbose=True):
    """Configure root logging: INFO on the main process (rank -1/0) when verbose, else WARN."""
    level = logging.INFO if verbose and rank in (-1, 0) else logging.WARN
    logging.basicConfig(format="%(message)s", level=level)
def print_args(name, opt):
    """Pretty-print an argparse Namespace as 'name: k1=v1, k2=v2, ...'."""
    rendered = ", ".join(f"{k}={v}" for k, v in vars(opt).items())
    print(colorstr(f"{name}: ") + rendered)
def init_seeds(seed=0):
    """Seed the Python, NumPy and PyTorch RNGs for reproducibility.

    See https://pytorch.org/docs/stable/notes/randomness.html. With seed == 0,
    cudnn is put in deterministic (slower, reproducible) mode; any other seed
    enables benchmark mode (faster, less reproducible).
    """
    import torch.backends.cudnn as cudnn
    for seeder in (random.seed, np.random.seed, torch.manual_seed):
        seeder(seed)
    deterministic = seed == 0
    cudnn.benchmark = not deterministic
    cudnn.deterministic = deterministic
def get_latest_run(search_dir="."):
    """Return the most recently created 'last*.pt' under search_dir, or '' if none exists.

    Used to locate the checkpoint to --resume from.
    """
    candidates = glob.glob(f"{search_dir}/**/last*.pt", recursive=True)
    if not candidates:
        return ""
    return max(candidates, key=os.path.getctime)
def user_config_dir(dir="Ultralytics", env_var="YOLOV5_CONFIG_DIR"):
    """Return (and create) the user configuration directory.

    The environment variable wins when set; otherwise an OS-specific per-user
    config dir is used, falling back to /tmp when it is not writeable (GCP and
    AWS lambda fix — only /tmp is writeable there).
    """
    override = os.getenv(env_var)
    if override:
        path = Path(override)  # environment variable takes precedence
    else:
        os_dirs = {
            "Windows": "AppData/Roaming",
            "Linux": ".config",
            "Darwin": "Library/Application Support",
        }  # 3 OS dirs
        base = Path.home() / os_dirs.get(platform.system(), "")
        path = (base if is_writeable(base) else Path("/tmp")) / dir
    path.mkdir(exist_ok=True)  # make if required
    return path
def is_writeable(dir, test=False):
    """Return True if `dir` has write permissions.

    With test=True, verify empirically by creating and deleting a temp file
    (the only reliable method on Windows); otherwise query the permission bits
    via os.access.
    """
    if test:  # method 1: actually open a file with write permissions
        file = Path(dir) / "tmp.txt"
        try:
            with open(file, "w"):
                pass
            file.unlink()  # remove file
            return True
        except IOError:
            return False
    else:  # method 2: permission bits (may be unreliable on Windows)
        # bug fix: os.R_OK checked *read* access; W_OK is the write-permission flag
        return os.access(dir, os.W_OK)
def is_docker():
    """Best-effort check for a Docker container environment.

    NOTE(review): /workspace is a convention of the Ultralytics image only;
    '/.dockerenv' would be the generic marker (see the original comment).
    """
    return Path("/workspace").exists()  # or Path('/.dockerenv').exists()
def is_colab():
    """Return True when running inside a Google Colab instance."""
    try:
        import google.colab  # noqa: F401 — the module's mere presence is the signal
    except ImportError:
        return False
    return True
def is_pip():
    """Return True when this file runs from an installed pip package (under site-packages)."""
    parts = Path(__file__).resolve().parts
    return any(part == "site-packages" for part in parts)
def is_ascii(s=""):
    """Return True if `s` (converted to str) contains only ASCII characters.

    The encode/decode round-trip is used because str.isascii() only exists on
    Python >= 3.7.
    """
    text = str(s)  # convert list, tuple, None, etc. to str
    return len(text.encode().decode("ascii", "ignore")) == len(text)
def is_chinese(s="人工智能"):
    """Return a truthy re.Match if `s` contains any CJK character, else None."""
    cjk = re.compile("[\u4e00-\u9fff]")
    return cjk.search(s)
def emojis(str=""):
    """Return an emoji-safe version of `str`: non-ASCII stripped on Windows, unchanged elsewhere.

    NOTE: the parameter is named `str` (shadowing the builtin) to preserve the
    original keyword interface.
    """
    if platform.system() == "Windows":
        return str.encode().decode("ascii", "ignore")
    return str
def file_size(path):
    """Return the size of a file or directory tree in megabytes (0.0 if neither exists)."""
    target = Path(path)
    if target.is_file():
        return target.stat().st_size / 1e6
    if target.is_dir():
        total = sum(f.stat().st_size for f in target.glob("**/*") if f.is_file())
        return total / 1e6
    return 0.0
def check_online():
    """Return True if the internet is reachable (socket to 1.1.1.1:443, 5 s timeout)."""
    import socket
    try:
        socket.create_connection(("1.1.1.1", 443), 5)  # check host accessibility
    except OSError:
        return False
    return True
@try_except
def check_git_status():
    """Recommend 'git pull' if the local checkout is behind origin/master.

    Best-effort: wrapped in @try_except, and each assert below aborts the check
    with an explanatory message instead of crashing the caller.
    """
    msg = ", for updates see https://github.com/ultralytics/yolov5"
    print(colorstr("github: "), end="")
    assert Path(".git").exists(), "skipping check (not a git repository)" + msg
    assert not is_docker(), "skipping check (Docker image)" + msg
    assert check_online(), "skipping check (offline)" + msg
    cmd = "git fetch && git config --get remote.origin.url"
    url = check_output(cmd, shell=True, timeout=5).decode().strip()  # git fetch
    # bug fix: str.rstrip(".git") strips any trailing '.', 'g', 'i', 't' characters
    # (e.g. 'repo-kit.git' -> 'repo-k'); remove the literal suffix instead.
    if url.endswith(".git"):
        url = url[: -len(".git")]
    branch = (
        check_output("git rev-parse --abbrev-ref HEAD", shell=True).decode().strip()
    )  # checked out
    n = int(
        check_output(f"git rev-list {branch}..origin/master --count", shell=True)
    )  # commits behind
    if n > 0:
        s = f"⚠️ YOLOv5 is out of date by {n} commit{'s' * (n > 1)}. Use `git pull` or `git clone {url}` to update."
    else:
        s = f"up to date with {url} ✅"
    print(emojis(s))  # emoji-safe
def check_python(minimum="3.6.2"):
    """Assert the running Python interpreter satisfies the *minimum* version."""
    check_version(platform.python_version(), minimum, name="Python ")
def check_version(current="0.0.0", minimum="0.0.0", name="version ", pinned=False):
    """Assert *current* version satisfies *minimum* (exact match when pinned=True)."""
    cur, req = (pkg.parse_version(v) for v in (current, minimum))
    satisfied = cur == req if pinned else cur >= req
    assert (
        satisfied
    ), f"{name}{minimum} required by YOLOv5, but {name}{current} is currently installed"
@try_except
def check_requirements(
    requirements=ROOT / "requirements.txt", exclude=(), install=True
):
    """Check installed packages meet requirements; optionally pip-install missing ones.

    Args:
        requirements: path to a requirements.txt file, or a list/tuple of
            requirement strings.
        exclude: package names to skip.
        install: if True, attempt `pip install` for unmet requirements
            (needs internet access).
    """
    # Check installed dependencies meet requirements (pass *.txt file or list of packages)
    prefix = colorstr("red", "bold", "requirements:")
    check_python() # check python version
    if isinstance(requirements, (str, Path)): # requirements.txt file
        file = Path(requirements)
        assert file.exists(), f"{prefix} {file.resolve()} not found, check failed."
        requirements = [
            f"{x.name}{x.specifier}"
            for x in pkg.parse_requirements(file.open())
            if x.name not in exclude
        ]
    else: # list or tuple of packages
        requirements = [x for x in requirements if x not in exclude]
    n = 0 # number of packages updates
    for r in requirements:
        try:
            pkg.require(r)
        except Exception as e: # DistributionNotFound or VersionConflict if requirements not met
            s = f"{prefix} {r} not found and is required by YOLOv5"
            if install:
                print(f"{s}, attempting auto-update...")
                try:
                    assert check_online(), f"'pip install {r}' skipped (offline)"
                    print(check_output(f"pip install '{r}'", shell=True).decode())
                    n += 1
                except Exception as e:
                    print(f"{prefix} {e}")
            else:
                print(f"{s}. Please install and rerun your command.")
    if n: # if packages updated
        # 'file' only exists when a requirements.txt path was given
        source = file.resolve() if "file" in locals() else requirements
        s = (
            f"{prefix} {n} package{'s' * (n > 1)} updated per {source}\n"
            f"{prefix} ⚠️ {colorstr('bold', 'Restart runtime or rerun command for updates to take effect')}\n"
        )
        print(emojis(s))
def check_img_size(imgsz, s=32, floor=0):
    """Round image size(s) up to the nearest multiple of stride *s*, min *floor*.

    Accepts an int (e.g. 640) or a list of per-dimension sizes (e.g. [640, 480]);
    prints a warning when the value had to be adjusted.
    """
    if isinstance(imgsz, int):  # single size
        new_size = max(make_divisible(imgsz, int(s)), floor)
    else:  # per-dimension list
        new_size = [max(make_divisible(dim, int(s)), floor) for dim in imgsz]
    if new_size != imgsz:
        print(
            f"WARNING: --img-size {imgsz} must be multiple of max stride {s}, updating to {new_size}"
        )
    return new_size
def check_imshow():
    """Return True if the environment supports cv2.imshow() image display.

    Docker and Colab are rejected outright; otherwise a 1x1 test window is
    opened and destroyed to probe for a working GUI backend.
    """
    # Check if environment supports image displays
    try:
        assert not is_docker(), "cv2.imshow() is disabled in Docker environments"
        assert not is_colab(), "cv2.imshow() is disabled in Google Colab environments"
        cv2.imshow("test", np.zeros((1, 1, 3)))
        cv2.waitKey(1)
        cv2.destroyAllWindows()
        cv2.waitKey(1)
        return True
    except Exception as e:
        print(
            f"WARNING: Environment does not support cv2.imshow() or PIL Image.show() image displays\n{e}"
        )
        return False
def check_suffix(file="yolov5s.pt", suffix=(".pt",), msg=""):
    """Assert every file in *file* carries a suffix from *suffix*.

    No-op when *file* or *suffix* is empty; files without any suffix pass.
    """
    if not (file and suffix):
        return
    if isinstance(suffix, str):
        suffix = [suffix]
    files = file if isinstance(file, (list, tuple)) else [file]
    for f in files:
        s = Path(f).suffix.lower()
        if s:
            assert s in suffix, f"{msg}{f} acceptable suffix is {suffix}"
def check_yaml(file, suffix=(".yaml", ".yml")):
    """Locate/download a YAML file (suffix-checked) and return its path."""
    return check_file(file, suffix)
def check_file(file, suffix=""):
    """Resolve *file* to a local path: pass through if it exists, download it
    if it is a URL, otherwise search the data/models/utils directories.

    Raises AssertionError if a download produces an empty file, if no file is
    found, or if several candidates match.
    """
    # Search/download file (if necessary) and return path
    check_suffix(file, suffix) # optional
    file = str(file) # convert to str()
    if Path(file).is_file() or file == "": # exists
        return file
    elif file.startswith(("http:/", "https:/")): # download
        url = str(Path(file)).replace(":/", "://") # Pathlib turns :// -> :/
        file = Path(
            urllib.parse.unquote(file).split("?")[0]
        ).name # '%2F' to '/', split https://url.com/file.txt?auth
        print(f"Downloading {url} to {file}...")
        torch.hub.download_url_to_file(url, file)
        assert (
            Path(file).exists() and Path(file).stat().st_size > 0
        ), f"File download failed: {url}" # check
        return file
    else: # search
        files = []
        for d in "data", "models", "utils": # search directories
            files.extend(
                glob.glob(str(ROOT / d / "**" / file), recursive=True)
            ) # find file
        assert len(files), f"File not found: {file}" # assert file was found
        assert (
            len(files) == 1
        ), f"Multiple files match '{file}', specify exact path: {files}" # assert unique
        return files[0] # return file
def check_dataset(data, autodownload=True):
    """Resolve a dataset descriptor to a parsed dict, downloading/unzipping if needed.

    Args:
        data: dataset dict, a path to a dataset YAML, or a URL/path to a
            .zip containing a YAML (e.g. .../coco128_with_yaml.zip).
        autodownload: if True and the 'val' paths are missing, run the
            dataset's 'download' entry (URL, bash or inline python).

    Returns:
        dict with 'train'/'val'/'test' paths prefixed by 'path', plus 'nc'
        and 'names' (auto-filled as class0..classN-1 if absent).

    Raises:
        Exception if the dataset is missing and cannot be downloaded.
    """
    # Download and/or unzip dataset if not found locally
    # Usage: https://github.com/ultralytics/yolov5/releases/download/v1.0/coco128_with_yaml.zip
    # Download (optional)
    extract_dir = ""
    if isinstance(data, (str, Path)) and str(data).endswith(
        ".zip"
    ): # i.e. gs://bucket/dir/coco128.zip
        download(
            data, dir="../datasets", unzip=True, delete=False, curl=False, threads=1
        )
        data = next((Path("../datasets") / Path(data).stem).rglob("*.yaml"))
        extract_dir, autodownload = data.parent, False
    # Read yaml (optional)
    if isinstance(data, (str, Path)):
        with open(data, errors="ignore") as f:
            data = yaml.safe_load(f) # dictionary
    # Parse yaml
    path = extract_dir or Path(data.get("path") or "") # optional 'path' default to '.'
    for k in "train", "val", "test":
        if data.get(k): # prepend path
            data[k] = (
                str(path / data[k])
                if isinstance(data[k], str)
                else [str(path / x) for x in data[k]]
            )
    assert "nc" in data, "Dataset 'nc' key missing."
    if "names" not in data:
        data["names"] = [
            f"class{i}" for i in range(data["nc"])
        ] # assign class names if missing
    train, val, test, s = [data.get(x) for x in ("train", "val", "test", "download")]
    if val:
        val = [
            Path(x).resolve() for x in (val if isinstance(val, list) else [val])
        ] # val path
        if not all(x.exists() for x in val):
            print(
                "\nWARNING: Dataset not found, nonexistent paths: %s"
                % [str(x) for x in val if not x.exists()]
            )
            if s and autodownload: # download script
                root = (
                    path.parent if "path" in data else ".."
                ) # unzip directory i.e. '../'
                if s.startswith("http") and s.endswith(".zip"): # URL
                    f = Path(s).name # filename
                    print(f"Downloading {s} to {f}...")
                    torch.hub.download_url_to_file(s, f)
                    Path(root).mkdir(parents=True, exist_ok=True) # create root
                    ZipFile(f).extractall(path=root) # unzip
                    Path(f).unlink() # remove zip
                    r = None # success
                elif s.startswith("bash "): # bash script
                    print(f"Running {s} ...")
                    r = os.system(s)
                else: # python script
                    # NOTE(review): exec() of a YAML-supplied string — only safe
                    # for trusted dataset definitions
                    r = exec(s, {"yaml": data}) # return None
                print(
                    f"Dataset autodownload {f'success, saved to {root}' if r in (0, None) else 'failure'}\n"
                )
            else:
                raise Exception("Dataset not found.")
    return data # dictionary
def url2file(url):
    """Extract the bare filename from a URL, e.g. https://url.com/file.txt?auth -> file.txt."""
    url = str(Path(url)).replace(":/", "://")  # Pathlib collapses :// to :/
    name = Path(urllib.parse.unquote(url)).name  # decode %xx escapes ('%2F' -> '/')
    return name.split("?")[0]  # drop any ?auth/query suffix
def download(url, dir=".", unzip=True, delete=True, curl=False, threads=1):
    """Download (and optionally unzip) one or more URLs into *dir*.

    Args:
        url: a single URL/path or an iterable of them.
        dir: destination directory (created if missing).
        unzip: extract downloaded .zip/.gz archives.
        delete: remove the archive after extraction.
        curl: use curl (with retry/resume) instead of torch.hub.
        threads: >1 downloads concurrently via a thread pool.
    """
    # Multi-threaded file download and unzip function, used in data.yaml for autodownload
    def download_one(url, dir):
        # Download 1 file
        f = dir / Path(url).name # filename
        if Path(url).is_file(): # exists in current path
            Path(url).rename(f) # move to dir
        elif not f.exists():
            print(f"Downloading {url} to {f}...")
            if curl:
                os.system(
                    f"curl -L '{url}' -o '{f}' --retry 9 -C -"
                ) # curl download, retry and resume on fail
            else:
                torch.hub.download_url_to_file(url, f, progress=True) # torch download
        if unzip and f.suffix in (".zip", ".gz"):
            print(f"Unzipping {f}...")
            if f.suffix == ".zip":
                ZipFile(f).extractall(path=dir) # unzip
            elif f.suffix == ".gz":
                os.system(f"tar xfz {f} --directory {f.parent}") # unzip
            if delete:
                f.unlink() # remove zip
    dir = Path(dir)
    dir.mkdir(parents=True, exist_ok=True) # make directory
    if threads > 1:
        pool = ThreadPool(threads)
        # repeat(dir) pairs the same destination with every URL
        pool.imap(lambda x: download_one(*x), zip(url, repeat(dir))) # multi-threaded
        pool.close()
        pool.join()
    else:
        for u in [url] if isinstance(url, (str, Path)) else url:
            download_one(u, dir)
def make_divisible(x, divisor):
    """Return the smallest multiple of *divisor* that is >= x."""
    return divisor * math.ceil(x / divisor)
def clean_str(s):
    """Replace shell/filesystem-unfriendly special characters in *s* with '_'."""
    specials = "[|@#!¡·$€%&()=?¿^*;:,¨´><+]"
    return re.sub(pattern=specials, repl="_", string=s)
def one_cycle(y1=0.0, y2=1.0, steps=100):
    """Return a lambda giving a cosine-shaped ramp from y1 to y2 over *steps*.

    See https://arxiv.org/pdf/1812.01187.pdf (one-cycle schedule).
    """
    return lambda x: y1 + (y2 - y1) * (1 - math.cos(x * math.pi / steps)) / 2
def colorstr(*input):
    """Color a string with ANSI escape codes, e.g. colorstr('blue', 'hello world').

    The last positional argument is the string; the preceding ones are color /
    attribute names. A single argument defaults to blue + bold.
    See https://en.wikipedia.org/wiki/ANSI_escape_code.
    """
    *args, string = (
        input if len(input) > 1 else ("blue", "bold", input[0])
    )  # color arguments, string
    colors = {
        "black": "\033[30m",  # basic colors
        "red": "\033[31m",
        "green": "\033[32m",
        "yellow": "\033[33m",
        "blue": "\033[34m",
        "magenta": "\033[35m",
        "cyan": "\033[36m",
        "white": "\033[37m",
        "bright_black": "\033[90m",  # bright colors
        "bright_red": "\033[91m",
        "bright_green": "\033[92m",
        "bright_yellow": "\033[93m",
        "bright_blue": "\033[94m",
        "bright_magenta": "\033[95m",
        "bright_cyan": "\033[96m",
        "bright_white": "\033[97m",
        "end": "\033[0m",  # misc
        "bold": "\033[1m",
        "underline": "\033[4m",
    }
    prefix = "".join(colors[x] for x in args)
    return prefix + f"{string}" + colors["end"]
def labels_to_class_weights(labels, nc=80):
    """Compute per-class weights (inverse frequency) from training labels.

    Args:
        labels: list of per-image arrays shaped (n, 5) as [class, x, y, w, h];
            labels[0] is None when no labels are loaded.
        nc: number of classes.

    Returns:
        torch.Tensor of shape (nc,), weights normalized to sum to 1
        (empty tensor when no labels are loaded).
    """
    if labels[0] is None:  # no labels loaded
        return torch.Tensor()
    labels = np.concatenate(labels, 0)  # labels.shape = (866643, 5) for COCO
    # np.int was deprecated and removed in NumPy 1.24; builtin int is equivalent
    classes = labels[:, 0].astype(int)  # labels = [class xywh]
    weights = np.bincount(classes, minlength=nc)  # occurrences per class
    # Prepend gridpoint count (for uCE training)
    # gpi = ((320 / 32 * np.array([1, 2, 4])) ** 2 * 3).sum() # gridpoints per image
    # weights = np.hstack([gpi * len(labels) - weights.sum() * 9, weights * 9]) ** 0.5 # prepend gridpoints to start
    weights[weights == 0] = 1  # replace empty bins with 1 to avoid divide-by-zero
    weights = 1 / weights  # inverse frequency
    weights /= weights.sum()  # normalize
    return torch.from_numpy(weights)
def labels_to_image_weights(labels, nc=80, class_weights=np.ones(80)):
    """Compute per-image sampling weights from class weights and image contents.

    Args:
        labels: list of per-image arrays shaped (n, 5) as [class, x, y, w, h].
        nc: number of classes.
        class_weights: (nc,) array of per-class weights.

    Returns:
        np.ndarray of shape (len(labels),) with one weight per image.
    """
    # np.int was deprecated and removed in NumPy 1.24; builtin int is equivalent
    class_counts = np.array(
        [np.bincount(x[:, 0].astype(int), minlength=nc) for x in labels]
    )
    image_weights = (class_weights.reshape(1, nc) * class_counts).sum(1)
    # index = random.choices(range(n), weights=image_weights, k=1) # weight image sample
    return image_weights
def coco80_to_coco91_class():
    """Map 80-class COCO (val2014) indices to 91-class COCO-paper indices.

    https://tech.amikelive.com/node-718/what-object-categories-labels-are-in-coco-dataset/
    The 91-class scheme skips ids 12, 26, 29, 30, 45, 66, 68, 69, 71 and 83.
    """
    return [
        1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21,
        22, 23, 24, 25, 27, 28, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42,
        43, 44, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
        62, 63, 64, 65, 67, 70, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 84,
        85, 86, 87, 88, 89, 90,
    ]
def xyxy2xywh(x):
    """Convert nx4 [x1, y1, x2, y2] boxes to [x_center, y_center, w, h]."""
    out = x.clone() if isinstance(x, torch.Tensor) else np.copy(x)
    out[:, 2] = x[:, 2] - x[:, 0]  # width
    out[:, 3] = x[:, 3] - x[:, 1]  # height
    out[:, 0] = (x[:, 0] + x[:, 2]) / 2  # x center
    out[:, 1] = (x[:, 1] + x[:, 3]) / 2  # y center
    return out
def xywh2xyxy(x):
    """Convert nx4 [x_center, y_center, w, h] boxes to [x1, y1, x2, y2]."""
    out = x.clone() if isinstance(x, torch.Tensor) else np.copy(x)
    half_w = x[:, 2] / 2
    half_h = x[:, 3] / 2
    out[:, 0] = x[:, 0] - half_w  # top left x
    out[:, 1] = x[:, 1] - half_h  # top left y
    out[:, 2] = x[:, 0] + half_w  # bottom right x
    out[:, 3] = x[:, 1] + half_h  # bottom right y
    return out
def xywhn2xyxy(x, w=640, h=640, padw=0, padh=0):
    """Convert normalized nx4 [x, y, w, h] boxes to pixel [x1, y1, x2, y2] with optional padding."""
    out = x.clone() if isinstance(x, torch.Tensor) else np.copy(x)
    out[:, 0] = w * (x[:, 0] - x[:, 2] / 2) + padw  # top left x
    out[:, 1] = h * (x[:, 1] - x[:, 3] / 2) + padh  # top left y
    out[:, 2] = w * (x[:, 0] + x[:, 2] / 2) + padw  # bottom right x
    out[:, 3] = h * (x[:, 1] + x[:, 3] / 2) + padh  # bottom right y
    return out
def xyxy2xywhn(x, w=640, h=640, clip=False, eps=0.0):
    """Convert nx4 pixel [x1, y1, x2, y2] boxes to normalized [x, y, w, h].

    When clip=True the input is first clipped in place to the (eps-shrunk)
    image bounds.
    """
    if clip:
        clip_coords(x, (h - eps, w - eps))  # warning: inplace clip
    out = x.clone() if isinstance(x, torch.Tensor) else np.copy(x)
    out[:, 0] = (x[:, 0] + x[:, 2]) / 2 / w  # x center
    out[:, 1] = (x[:, 1] + x[:, 3]) / 2 / h  # y center
    out[:, 2] = (x[:, 2] - x[:, 0]) / w  # width
    out[:, 3] = (x[:, 3] - x[:, 1]) / h  # height
    return out
def xyn2xy(x, w=640, h=640, padw=0, padh=0):
    """Convert normalized (n,2) segment points to pixel coordinates with optional padding."""
    out = x.clone() if isinstance(x, torch.Tensor) else np.copy(x)
    out[:, 0] = w * x[:, 0] + padw  # x
    out[:, 1] = h * x[:, 1] + padh  # y
    return out
def segment2box(segment, width=640, height=640):
    """Convert one (n,2) segment to a single xyxy box over its inside-image points.

    Args:
        segment: (n,2) array of xy points.
        width, height: image bounds used to filter points.

    Returns:
        np.array [x_min, y_min, x_max, y_max], or np.zeros((1, 4)) if no
        point lies inside the image.
    """
    x, y = segment.T  # segment xy
    inside = (x >= 0) & (y >= 0) & (x <= width) & (y <= height)
    x, y = x[inside], y[inside]
    # BUGFIX: test len(x), not any(x) — a valid segment lying entirely on the
    # x == 0 line made any(x) falsy and wrongly returned the empty box.
    return (
        np.array([x.min(), y.min(), x.max(), y.max()]) if len(x) else np.zeros((1, 4))
    )  # xyxy
def segments2boxes(segments):
    """Convert segment labels to box labels, i.e. (cls, xy1, xy2, ...) to (cls, xywh)."""
    boxes = [
        [s[:, 0].min(), s[:, 1].min(), s[:, 0].max(), s[:, 1].max()]  # xyxy per segment
        for s in segments
    ]
    return xyxy2xywh(np.array(boxes))  # cls, xywh
def resample_segments(segments, n=1000):
    """Up-sample each (m,2) segment in *segments* to n points in place; return the list."""
    for k, s in enumerate(segments):
        sample_pos = np.linspace(0, len(s) - 1, n)  # n evenly spaced positions
        orig_pos = np.arange(len(s))
        cols = [np.interp(sample_pos, orig_pos, s[:, c]) for c in range(2)]
        segments[k] = np.concatenate(cols).reshape(2, -1).T  # segment xy
    return segments
def scale_coords(img1_shape, coords, img0_shape, ratio_pad=None):
    """Rescale xyxy *coords* (in place) from img1_shape back to img0_shape.

    Args:
        img1_shape: (h, w) of the letterboxed/inference image.
        coords: (n, 4+) boxes whose first 4 columns are xyxy.
        img0_shape: (h, w) of the original image.
        ratio_pad: optional ((gain, ...), (pad_w, pad_h)) override; computed
            from the two shapes when None.

    Returns:
        the same *coords* object, clipped to img0_shape.
    """
    # Rescale coords (xyxy) from img1_shape to img0_shape
    if ratio_pad is None: # calculate from img0_shape
        gain = min(
            img1_shape[0] / img0_shape[0], img1_shape[1] / img0_shape[1]
        ) # gain = old / new
        pad = (img1_shape[1] - img0_shape[1] * gain) / 2, (
            img1_shape[0] - img0_shape[0] * gain
        ) / 2 # wh padding
    else:
        gain = ratio_pad[0][0]
        pad = ratio_pad[1]
    coords[:, [0, 2]] -= pad[0] # x padding
    coords[:, [1, 3]] -= pad[1] # y padding
    coords[:, :4] /= gain
    clip_coords(coords, img0_shape)
    return coords
def clip_coords(boxes, shape):
    """Clip xyxy bounding *boxes* in place to image bounds shape = (height, width, ...)."""
    h, w = shape[0], shape[1]
    if isinstance(boxes, torch.Tensor):  # faster column-by-column for tensors
        for col, hi in ((0, w), (1, h), (2, w), (3, h)):  # x1, y1, x2, y2
            boxes[:, col].clamp_(0, hi)
    else:  # np.array (faster grouped)
        boxes[:, [0, 2]] = boxes[:, [0, 2]].clip(0, w)  # x1, x2
        boxes[:, [1, 3]] = boxes[:, [1, 3]].clip(0, h)  # y1, y2
def non_max_suppression(
    prediction,
    conf_thres=0.25,
    iou_thres=0.45,
    classes=None,
    agnostic=False,
    multi_label=False,
    labels=(),
    max_det=300,
):
    """Runs Non-Maximum Suppression (NMS) on inference results

    Args:
        prediction: model output, shape (batch, boxes, 5 + nc) with
            [xywh, objectness, class scores] per box.
        conf_thres: objectness/confidence threshold for candidate filtering.
        iou_thres: IoU threshold passed to torchvision.ops.nms.
        classes: optional list of class indices to keep.
        agnostic: if True, suppress across classes (no per-class offset).
        multi_label: allow multiple labels per box (only when nc > 1).
        labels: optional per-image apriori labels appended for autolabelling.
        max_det: maximum detections kept per image.

    Returns:
        list of detections, on (n,6) tensor per image [xyxy, conf, cls]
    """
    nc = prediction.shape[2] - 5 # number of classes
    xc = prediction[..., 4] > conf_thres # candidates
    # Checks
    assert (
        0 <= conf_thres <= 1
    ), f"Invalid Confidence threshold {conf_thres}, valid values are between 0.0 and 1.0"
    assert (
        0 <= iou_thres <= 1
    ), f"Invalid IoU {iou_thres}, valid values are between 0.0 and 1.0"
    # Settings
    min_wh, max_wh = 2, 4096 # (pixels) minimum and maximum box width and height
    max_nms = 30000 # maximum number of boxes into torchvision.ops.nms()
    time_limit = 10.0 # seconds to quit after
    redundant = True # require redundant detections
    multi_label &= nc > 1 # multiple labels per box (adds 0.5ms/img)
    merge = False # use merge-NMS
    t = time.time()
    output = [torch.zeros((0, 6), device=prediction.device)] * prediction.shape[0]
    for xi, x in enumerate(prediction): # image index, image inference
        # Apply constraints
        # x[((x[..., 2:4] < min_wh) | (x[..., 2:4] > max_wh)).any(1), 4] = 0 # width-height
        x = x[xc[xi]] # confidence
        # Cat apriori labels if autolabelling
        if labels and len(labels[xi]):
            l = labels[xi]
            v = torch.zeros((len(l), nc + 5), device=x.device)
            v[:, :4] = l[:, 1:5] # box
            v[:, 4] = 1.0 # conf
            v[range(len(l)), l[:, 0].long() + 5] = 1.0 # cls
            x = torch.cat((x, v), 0)
        # If none remain process next image
        if not x.shape[0]:
            continue
        # Compute conf
        x[:, 5:] *= x[:, 4:5] # conf = obj_conf * cls_conf
        # Box (center x, center y, width, height) to (x1, y1, x2, y2)
        box = xywh2xyxy(x[:, :4])
        # Detections matrix nx6 (xyxy, conf, cls)
        if multi_label:
            i, j = (x[:, 5:] > conf_thres).nonzero(as_tuple=False).T
            x = torch.cat((box[i], x[i, j + 5, None], j[:, None].float()), 1)
        else: # best class only
            conf, j = x[:, 5:].max(1, keepdim=True)
            x = torch.cat((box, conf, j.float()), 1)[conf.view(-1) > conf_thres]
        # Filter by class
        if classes is not None:
            x = x[(x[:, 5:6] == torch.tensor(classes, device=x.device)).any(1)]
        # Apply finite constraint
        # if not torch.isfinite(x).all():
        # x = x[torch.isfinite(x).all(1)]
        # Check shape
        n = x.shape[0] # number of boxes
        if not n: # no boxes
            continue
        elif n > max_nms: # excess boxes
            x = x[x[:, 4].argsort(descending=True)[:max_nms]] # sort by confidence
        # Batched NMS: offsetting boxes by class * max_wh keeps classes disjoint
        # so a single torchvision.ops.nms call suppresses per class
        c = x[:, 5:6] * (0 if agnostic else max_wh) # classes
        boxes, scores = x[:, :4] + c, x[:, 4] # boxes (offset by class), scores
        i = torchvision.ops.nms(boxes, scores, iou_thres) # NMS
        if i.shape[0] > max_det: # limit detections
            i = i[:max_det]
        if merge and (1 < n < 3e3): # Merge NMS (boxes merged using weighted mean)
            # update boxes as boxes(i,4) = weights(i,n) * boxes(n,4)
            iou = box_iou(boxes[i], boxes) > iou_thres # iou matrix
            weights = iou * scores[None] # box weights
            x[i, :4] = torch.mm(weights, x[:, :4]).float() / weights.sum(
                1, keepdim=True
            ) # merged boxes
            if redundant:
                i = i[iou.sum(1) > 1] # require redundancy
        output[xi] = x[i]
        if (time.time() - t) > time_limit:
            print(f"WARNING: NMS time limit {time_limit}s exceeded")
            break # time limit exceeded
    return output
def strip_optimizer(
    f="best.pt", s=""
): # from utils.general import *; strip_optimizer()
    """Strip optimizer state from checkpoint *f* to finalize training.

    Replaces the model with its EMA (if present), drops optimizer / wandb /
    EMA keys, converts weights to FP16 and freezes parameters. Saves to *s*
    if given, else overwrites *f*.
    """
    # Strip optimizer from 'f' to finalize training, optionally save as 's'
    x = torch.load(f, map_location=torch.device("cpu"))
    if x.get("ema"):
        x["model"] = x["ema"] # replace model with ema
    for k in "optimizer", "training_results", "wandb_id", "ema", "updates": # keys
        x[k] = None
    x["epoch"] = -1
    x["model"].half() # to FP16
    for p in x["model"].parameters():
        p.requires_grad = False
    torch.save(x, s or f)
    mb = os.path.getsize(s or f) / 1e6 # filesize
    print(
        f"Optimizer stripped from {f},{(' saved as %s,' % s) if s else ''} {mb:.1f}MB"
    )
def print_mutation(results, hyp, save_dir, bucket):
    """Log a hyperparameter-evolution generation to evolve.csv / hyp_evolve.yaml.

    Args:
        results: tuple of 7 metric/loss values for this generation.
        hyp: dict of hyperparameters used.
        save_dir: Path where evolve.csv / results.csv / hyp_evolve.yaml live.
        bucket: optional GCS bucket name for syncing evolve.csv via gsutil.
    """
    evolve_csv, results_csv, evolve_yaml = (
        save_dir / "evolve.csv",
        save_dir / "results.csv",
        save_dir / "hyp_evolve.yaml",
    )
    keys = (
        "metrics/precision",
        "metrics/recall",
        "metrics/mAP_0.5",
        "metrics/mAP_0.5:0.95",
        "val/box_loss",
        "val/obj_loss",
        "val/cls_loss",
    ) + tuple(
        hyp.keys()
    ) # [results + hyps]
    keys = tuple(x.strip() for x in keys)
    vals = results + tuple(hyp.values())
    n = len(keys)
    # Download (optional)
    if bucket:
        url = f"gs://{bucket}/evolve.csv"
        if gsutil_getsize(url) > (
            os.path.getsize(evolve_csv) if os.path.exists(evolve_csv) else 0
        ):
            os.system(
                f"gsutil cp {url} {save_dir}"
            ) # download evolve.csv if larger than local
    # Log to evolve.csv
    s = (
        "" if evolve_csv.exists() else (("%20s," * n % keys).rstrip(",") + "\n")
    ) # add header
    with open(evolve_csv, "a") as f:
        f.write(s + ("%20.5g," * n % vals).rstrip(",") + "\n")
    # Print to screen
    print(colorstr("evolve: ") + ", ".join(f"{x.strip():>20s}" for x in keys))
    print(colorstr("evolve: ") + ", ".join(f"{x:20.5g}" for x in vals), end="\n\n\n")
    # Save yaml
    with open(evolve_yaml, "w") as f:
        data = pd.read_csv(evolve_csv)
        data = data.rename(columns=lambda x: x.strip()) # strip keys
        i = np.argmax(fitness(data.values[:, :7])) # best generation by fitness
        f.write(
            "# YOLOv5 Hyperparameter Evolution Results\n"
            + f"# Best generation: {i}\n"
            + f"# Last generation: {len(data)}\n"
            + "# "
            + ", ".join(f"{x.strip():>20s}" for x in keys[:7])
            + "\n"
            + "# "
            + ", ".join(f"{x:>20.5g}" for x in data.values[i, :7])
            + "\n\n"
        )
        yaml.safe_dump(hyp, f, sort_keys=False)
    if bucket:
        os.system(f"gsutil cp {evolve_csv} {evolve_yaml} gs://{bucket}") # upload
def apply_classifier(x, model, img, im0):
    """Apply a second-stage classifier to YOLO detections, keeping only boxes
    whose classifier prediction agrees with the detector's class.

    Args:
        x: list of per-image detections (n, 6) [xyxy, conf, cls].
        model: classifier taking a (n, 3, 224, 224) tensor.
        img: network-input image tensor (used for rescaling boxes).
        im0: original image (np.ndarray) or list of them.

    Returns:
        x with non-matching detections filtered out (modified in place).
    """
    # Apply a second stage classifier to yolo outputs
    im0 = [im0] if isinstance(im0, np.ndarray) else im0
    for i, d in enumerate(x): # per image
        if d is not None and len(d):
            d = d.clone()
            # Reshape and pad cutouts
            b = xyxy2xywh(d[:, :4]) # boxes
            b[:, 2:] = b[:, 2:].max(1)[0].unsqueeze(1) # rectangle to square
            b[:, 2:] = b[:, 2:] * 1.3 + 30 # pad
            d[:, :4] = xywh2xyxy(b).long()
            # Rescale boxes from img_size to im0 size
            scale_coords(img.shape[2:], d[:, :4], im0[i].shape)
            # Classes
            pred_cls1 = d[:, 5].long()
            ims = []
            for j, a in enumerate(d): # per item
                cutout = im0[i][int(a[1]) : int(a[3]), int(a[0]) : int(a[2])]
                im = cv2.resize(cutout, (224, 224)) # BGR
                # cv2.imwrite('example%i.jpg' % j, cutout)
                im = im[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416
                im = np.ascontiguousarray(im, dtype=np.float32) # uint8 to float32
                im /= 255.0 # 0 - 255 to 0.0 - 1.0
                ims.append(im)
            pred_cls2 = model(torch.Tensor(ims).to(d.device)).argmax(
                1
            ) # classifier prediction
            x[i] = x[i][pred_cls1 == pred_cls2] # retain matching class detections
    return x
def save_one_box(
    xyxy, im, file="image.jpg", gain=1.02, pad=10, square=False, BGR=False, save=True
):
    """Crop box *xyxy* from image *im*, optionally square/padded, save and/or return it.

    Args:
        xyxy: box coordinates (any 4-element sequence/tensor).
        im: source image array (H, W, C).
        file: destination path (saved with .jpg suffix, auto-incremented).
        gain: crop size multiplier.
        pad: extra pixels added to each side.
        square: if True, expand the box to a square first.
        BGR: keep BGR channel order (True) or flip to RGB (False).
        save: write the crop to disk via cv2.imwrite.

    Returns:
        the cropped image array.
    """
    # Save image crop as {file} with crop size multiple {gain} and {pad} pixels. Save and/or return crop
    xyxy = torch.tensor(xyxy).view(-1, 4)
    b = xyxy2xywh(xyxy) # boxes
    if square:
        b[:, 2:] = b[:, 2:].max(1)[0].unsqueeze(1) # attempt rectangle to square
    b[:, 2:] = b[:, 2:] * gain + pad # box wh * gain + pad
    xyxy = xywh2xyxy(b).long()
    clip_coords(xyxy, im.shape)
    crop = im[
        int(xyxy[0, 1]) : int(xyxy[0, 3]),
        int(xyxy[0, 0]) : int(xyxy[0, 2]),
        :: (1 if BGR else -1),
    ]
    if save:
        cv2.imwrite(str(increment_path(file, mkdir=True).with_suffix(".jpg")), crop)
    return crop
def increment_path(path, exist_ok=False, sep="", mkdir=False):
    """Increment a file or directory path, i.e. runs/exp -> runs/exp{sep}2, runs/exp{sep}3, ...

    Args:
        path: base path; returned unchanged when it does not exist or exist_ok=True.
        exist_ok: reuse *path* even when it already exists.
        sep: separator between stem and increment number.
        mkdir: create the returned directory (or the file's parent) if missing.

    Returns:
        pathlib.Path of the (possibly incremented) path.
    """
    path = Path(path)  # os-agnostic
    if path.exists() and not exist_ok:
        suffix = path.suffix
        path = path.with_suffix("")
        dirs = glob.glob(f"{path}{sep}*")  # similar paths
        # BUGFIX: the old rf"%s{sep}(\d+)" % path.stem mixed %-formatting into the
        # pattern (crashing on stems containing '%') and injected the raw stem into
        # the regex (misbehaving on metacharacters like '(' or '+'); escape both.
        pattern = re.compile(rf"{re.escape(path.stem)}{re.escape(sep)}(\d+)")
        matches = [pattern.search(d) for d in dirs]
        i = [int(m.group(1)) for m in matches if m]  # existing increment indices
        n = max(i) + 1 if i else 2  # increment number
        path = Path(f"{path}{sep}{n}{suffix}")  # update path
    dir = path if path.suffix == "" else path.parent  # directory
    if not dir.exists() and mkdir:
        dir.mkdir(parents=True, exist_ok=True)  # make directory
    return path
| 34.418182
| 118
| 0.557622
|
64157e58daca1bb191592aa5abfd32c7d05d764a
| 511
|
py
|
Python
|
tuiuiu/tuiuiucore/migrations/0035_page_last_published_at.py
|
caputomarcos/tuiuiu.io
|
d8fb57cf95487e7fe1454b2130ef18acc916da46
|
[
"BSD-3-Clause"
] | 3
|
2019-08-08T09:09:35.000Z
|
2020-12-15T18:04:17.000Z
|
tuiuiu/tuiuiucore/migrations/0035_page_last_published_at.py
|
caputomarcos/tuiuiu.io
|
d8fb57cf95487e7fe1454b2130ef18acc916da46
|
[
"BSD-3-Clause"
] | null | null | null |
tuiuiu/tuiuiucore/migrations/0035_page_last_published_at.py
|
caputomarcos/tuiuiu.io
|
d8fb57cf95487e7fe1454b2130ef18acc916da46
|
[
"BSD-3-Clause"
] | 1
|
2017-09-09T20:10:40.000Z
|
2017-09-09T20:10:40.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-05-22 13:35
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the nullable, non-editable 'last_published_at' timestamp field to Page."""

    dependencies = [
        ('tuiuiucore', '0034_page_live_revision'),
    ]
    operations = [
        migrations.AddField(
            model_name='page',
            name='last_published_at',
            field=models.DateTimeField(editable=False, null=True, verbose_name='last published at'),
        ),
    ]
| 24.333333
| 100
| 0.641879
|
8c7b819b66d9a9e85664a3dc91130f2a15bb620e
| 2,719
|
py
|
Python
|
tests/parser/functions/rlp/conftest.py
|
Dexaran/viper
|
9c1992f56c2b78416f981452f0457449f7670d1a
|
[
"MIT"
] | 1
|
2018-07-26T00:56:30.000Z
|
2018-07-26T00:56:30.000Z
|
tests/parser/functions/rlp/conftest.py
|
Dexaran/viper
|
9c1992f56c2b78416f981452f0457449f7670d1a
|
[
"MIT"
] | null | null | null |
tests/parser/functions/rlp/conftest.py
|
Dexaran/viper
|
9c1992f56c2b78416f981452f0457449f7670d1a
|
[
"MIT"
] | 2
|
2018-04-06T02:55:43.000Z
|
2018-07-26T00:56:36.000Z
|
import pytest
import rlp
from viper import utils as viper_utils
from ethereum import transactions, messages
@pytest.fixture
def inject_tx(utils, chain):
    """Pytest fixture returning a helper that injects a raw signed transaction.

    The helper funds the sender, applies the transaction to the test chain,
    mines a block and returns the address of the contract it created.
    """
    def inject_tx(txhex):
        # decode the 0x-prefixed raw transaction hex into a Transaction object
        tx = rlp.decode(utils.decode_hex(txhex[2:]), transactions.Transaction)
        # fund the sender with exactly enough for the gas it will spend
        chain.head_state.set_balance(tx.sender, tx.startgas * tx.gasprice)
        chain.chain.state.set_balance(tx.sender, tx.startgas * tx.gasprice)
        messages.apply_transaction(chain.head_state, tx)
        chain.block.transactions.append(tx)
        # contract address = last 20 bytes of sha3(rlp([sender, nonce=0]))
        contract_address = utils.sha3(rlp.encode([tx.sender, 0]))[12:]
        assert chain.head_state.get_code(contract_address)
        chain.mine(1)
        chain.head_state.gas_limit = 10**9
        return contract_address
    return inject_tx
@pytest.fixture
def fake_tx(inject_tx):
    """Pytest fixture deploying the canonical RLP decoder contract.

    The hex blob below is a pre-signed deployment transaction; presumably it
    deploys the shared RLP decoder used by Viper — its expected address is
    asserted against viper_utils.RLP_DECODER_ADDRESS.
    """
    def fake_tx():
        tx = "0xf9035b808506fc23ac0083045ef88080b903486103305660006109ac5260006109cc527f0100000000000000000000000000000000000000000000000000000000000000600035046109ec526000610a0c5260006109005260c06109ec51101515585760f86109ec51101561006e5760bf6109ec510336141558576001610a0c52610098565b60013560f76109ec51036020035260005160f66109ec510301361415585760f66109ec5103610a0c525b61010060016064818352015b36610a0c511015156100b557610291565b7f0100000000000000000000000000000000000000000000000000000000000000610a0c5135046109ec526109cc5160206109ac51026040015260016109ac51016109ac5260806109ec51101561013b5760016109cc5161044001526001610a0c516109cc5161046001376001610a0c5101610a0c5260216109cc51016109cc52610281565b60b86109ec5110156101d15760806109ec51036109cc51610440015260806109ec51036001610a0c51016109cc51610460013760816109ec5114156101ac5760807f01000000000000000000000000000000000000000000000000000000000000006001610a0c5101350410151558575b607f6109ec5103610a0c5101610a0c5260606109ec51036109cc51016109cc52610280565b60c06109ec51101561027d576001610a0c51013560b76109ec510360200352600051610a2c526038610a2c5110157f01000000000000000000000000000000000000000000000000000000000000006001610a0c5101350402155857610a2c516109cc516104400152610a2c5160b66109ec5103610a0c51016109cc516104600137610a2c5160b66109ec5103610a0c510101610a0c526020610a2c51016109cc51016109cc5261027f565bfe5b5b5b81516001018083528114156100a4575b5050601f6109ac511115155857602060206109ac5102016109005260206109005103610a0c5261010060016064818352015b6000610a0c5112156102d45761030a565b61090051610a0c516040015101610a0c51610900516104400301526020610a0c5103610a0c5281516001018083528114156102c3575b50506109cc516109005101610420526109cc5161090051016109005161044003f35b61000461033003610004600039610004610330036000f31b2d4f"
        address = inject_tx(tx)
        assert viper_utils.bytes_to_int(address) == viper_utils.RLP_DECODER_ADDRESS
        return address
    return fake_tx
| 93.758621
| 1,741
| 0.891136
|
9de3ba250eb73333853b29db3482e48a696b5b19
| 7,205
|
py
|
Python
|
implementations-from-scratch/neuralnetwork/neuralnetwork_test.py
|
georgepachitariu/machine-learning-portfolio
|
47452524b0f2ccf409ba12e6a717157e569d62e1
|
[
"Apache-2.0"
] | 2
|
2020-11-25T11:27:34.000Z
|
2021-01-19T17:42:47.000Z
|
implementations-from-scratch/neuralnetwork/neuralnetwork_test.py
|
georgepachitariu/machine-learning-portfolio
|
47452524b0f2ccf409ba12e6a717157e569d62e1
|
[
"Apache-2.0"
] | null | null | null |
implementations-from-scratch/neuralnetwork/neuralnetwork_test.py
|
georgepachitariu/machine-learning-portfolio
|
47452524b0f2ccf409ba12e6a717157e569d62e1
|
[
"Apache-2.0"
] | null | null | null |
import neuralnetwork as nn
import unittest
import numpy as np
import warnings
class SigmoidTests(unittest.TestCase):
    """Tests for nn.Sigmoid: saturation at the tails and exact midpoint values."""
    def test_sigmoid(self):
        assert 0 < nn.Sigmoid.compute(np.array([-10])) < 0.0001
        assert nn.Sigmoid.compute(np.array([0])) == 0.5
        assert 0.9999 < nn.Sigmoid.compute(np.array([10])) < 1
    def test_sigmoid_derivative(self):
        # sigmoid'(0) = 0.25 is the derivative's maximum
        assert 0 < nn.Sigmoid.derivative(np.array([-10])) < 0.0001
        assert nn.Sigmoid.derivative(np.array([0])) == 0.25
        assert 0 < nn.Sigmoid.derivative(np.array([10])) < 0.0001
class FeedForward(unittest.TestCase):
    """Tests for nn.FeedForward.multiply_weights_and_input (x @ weights.T)."""
    def test_feedforward_2nodes_bias_nextlayer_2nodes_4records(self):
        # 4 records x 3 inputs (first column is the bias term)
        x = np.array([[1, 2, 2],
                      [1, 0, 1],
                      [1, -1, 0],
                      [1, 1, 0]])
        weights = np.array([[-1.0, 1.0, 0],
                            [-0.1, 0.1, 0]])
        result = nn.FeedForward.multiply_weights_and_input(x, weights)
        assert np.array_equal(result, np.array([[1, 0.1],
                                                [-1, -0.1],
                                                [-2, -0.2],
                                                [0, -0]]))
class CostTests(unittest.TestCase):
    """Tests for nn.Cost: regularization term, cross-entropy cost and its derivative."""
    def test_compute_regularization_term_2layers_weights(self):
        reg_value = 0.5
        nr_examples = 2
        weights = [np.array([[0.1, 0.2], [0.1, 0.2]]), np.array([[0.1, 0.2]])]
        result = nn.Cost.get_regularization_term(reg_value, nr_examples, weights)
        # 0.5 / (2*2) * sum of squared weights = 0.01875
        assert abs(result - 0.01875) < 0.0001
    def test_compute_cost_correct_1(self):
        predicted = np.array([[0.99999]])
        y = np.array([[1]])
        assert nn.Cost.compute(predicted, y, [], 0) < 0.0001
    def test_compute_cost_correct_2(self):
        predicted = np.array([[0.00001]])
        y = np.array([[0]])
        assert nn.Cost.compute(predicted, y, [], 0) < 0.0001
    def test_compute_cost_incorrect_1(self):
        predicted = np.array([[0.00001]])
        y = np.array([[1]])
        assert nn.Cost.compute(predicted, y, [], 0) > 1
    def test_compute_cost_incorrect_2(self):
        predicted = np.array([[0.99999]])
        y = np.array([[0]])
        assert nn.Cost.compute(predicted, y, [], 0) > 1
    def test_compute_cost_2records_2nodes_and_regularization(self):
        predicted = np.array([[0.99999], [0.00001]])
        y = np.array([[0],[0]])
        weights = [np.array([[0.1, 0.2], [0.1, 0.2]])]
        # -1/2 * (1*ln(1-0.99999)+1*ln(1-0.00001)) +
        # 0.5 / (2*2)*(0.01*2+0.04*2) = 5.7689
        result = nn.Cost.compute(predicted, y, weights, 0.5)
        assert 0 < result - 5.7689 < 0.0001
    def test_cost_compute_derivative_3nodes_4examples(self):
        # next layer has 2 nodes
        backpropagation_error=np.array([[0.5, 0.2],
                                        [0.5, 0.2],
                                        [0.5, 0.2],
                                        [0.5, 0.2]])
        activation_values=np.array([[0.2, 0.2, 0.2],
                                    [0.3, 0.3, 0.3],
                                    [0.4, 0.4, 0.4],
                                    [0.5, 0.5, 0.5]])
        reg_value=0.2
        layer_weights=np.array([[0.5, 0.5, 0.5],
                                [0.1, 0.1, 0.1]])
        # (0.5, 0.5, 0.5, 0.5) (0.2, 0.2, 0.2) (0.175, 0.175, 0.175)
        # 1/4 * (0.2, 0.2, 0.2, 0.2) * (0.3, 0.3, 0.3) = (0.07, 0.07, 0.07 )
        # (0.4, 0.4, 0.4)
        # (0.5, 0.5, 0.5)
        # (0.175, 0.175, 0.175) (0.5, 0.5, 0.5) (0.275, 0.275, 0.275)
        # (0.07, 0.07, 0.07 ) + 0.2 * (0.1, 0.1, 0.1) = (0.09, 0.09, 0.09 )
        result = nn.Cost.derivative(backpropagation_error, activation_values,
                                    layer_weights, reg_value)
        assert np.allclose(result, np.array([[0.275, 0.275, 0.275],
                                             [0.09, 0.09, 0.09]]))
class BackpropagationTests(unittest.TestCase):
    """Tests for nn.BackPropagation: last-layer and hidden-layer error derivatives."""
    def test_compute_error_final_layer_3labels_2records(self):
        feedforward_mult = np.array([[0.1, 0.1, 0.9], [0.4, 0.4, 0.5]])
        training_y = np.array([[0, 0, 1], [0, 1, 0]])
        result=nn.BackPropagation.compute_last_layer_error_deriv(feedforward_mult, training_y)
        result=np.around(result, decimals=1)
        # last-layer error is prediction minus target
        assert np.array_equal(result, np.array([[0.1, 0.1, -0.1], [0.4, -0.6, 0.5]]))
    def test_compute_error_middle_layer_3nodes_bias_2records(self):
        # previous layer, in backpropagation, has 3 nodes
        error_previous_layer = np.array([[0.5, 0.5, 0.5], [0.1, 0.4, -0.3]])
        layer_weights = np.array([[1, 2, 3, 4],
                                  [1, 1, 1, 1],
                                  [1, 1, 1, 1]])
        feedforward_mult = np.array([[1, 2, 3], [0.1, 0.2, 0.3]])
        # feedforw prev_err
        # (1, 0.1) (0.5, 0.1) (0.5, 0.01)
        # (2, 0.2) * (0.5, 0.4) = (1, 0.08)
        # (3, 0.3) (0.5,-0.3) (1.5, -0.09)
        # (0.5, 1, 1.5 ) (1, 2, 3, 4) (3, 3.5, 4, 4.5 )
        # (0.01, 0.08, -0.09) * (1, 1, 1, 1) = (0, 0.01, 0.02, 0.03)
        # (1, 1, 1, 1)
        result=nn.BackPropagation.compute_current_layer_error_deriv(error_previous_layer, layer_weights,
                                                                    feedforward_mult, lambda i: i, with_bias=True)
        # the bias column is dropped from the result
        assert np.allclose(result, np.array([[3.5, 4, 4.5],
                                             [0.01, 0.02, 0.03]]))
    def test_warning_0error(self):
        # a backprop error of exactly zero triggers a diagnostic warning in debug mode
        feedforward_mult = np.array([[1,0]])
        training_y = np.array([[1,1]])
        with warnings.catch_warnings(record=True) as w:
            nn.BackPropagation.compute_last_layer_error_deriv(feedforward_mult, training_y, debug=True)
            assert str(w[0].message) == 'Number of backpropagation errors ' \
                                        'with value zero increased to: 50% of total errors'
class NeuralNetworkTests(unittest.TestCase):
    """Unit tests for nn.NeuralNetwork construction and prediction stats."""
    def test_basic(self):
        """Random init: one weight matrix of shape (out, in), values in [-1, 1)."""
        net=nn.NeuralNetwork(layers_shape=(2,3), with_bias=False)
        assert len(net.layers_weights) == 1
        assert net.layers_weights[0].shape == (3,2)
        assert np.all(-1 <= net.layers_weights[0]) and np.all(net.layers_weights[0] < 1)
    def test_basic_weights(self, ):
        """Explicit weights and regularization value are stored verbatim."""
        net = nn.NeuralNetwork(layers_weights=[np.array([1])], regularization_value=5)
        assert net.layers_weights == [np.array([1])]
        assert net.regularization_value == 5
    def test_print_perc_predicted_correctly(self):
        """Only rows whose argmax matches the one-hot target count as correct."""
        predicted=np.array([[0.6, 0.2],
                            [0.2, 0.9],
                            [0.9, 0.99],
                            [0.8, 0.2]])
        y = np.array([[1, 0],
                      [1, 0],
                      [1, 0],
                      [0, 1]])
        count, percentage = nn.NeuralNetwork().get_predicted_correctly(predicted, y)
        # Only the first row's argmax (0) matches its label: 1 of 4 = 25%.
        assert count == 1
        assert percentage == 25
if __name__ == '__main__':
    # Run the whole test module when executed directly.
    unittest.main()
| 39.80663
| 114
| 0.489382
|
9e883cacad4c967af960c181b102eb007d005e2d
| 8,176
|
py
|
Python
|
pyperformance/benchmarks/bm_pickle.py
|
sourcery-ai-bot/pyperformance
|
f4e5667b080e05227f530ec7c985e6399e86347f
|
[
"MIT"
] | null | null | null |
pyperformance/benchmarks/bm_pickle.py
|
sourcery-ai-bot/pyperformance
|
f4e5667b080e05227f530ec7c985e6399e86347f
|
[
"MIT"
] | null | null | null |
pyperformance/benchmarks/bm_pickle.py
|
sourcery-ai-bot/pyperformance
|
f4e5667b080e05227f530ec7c985e6399e86347f
|
[
"MIT"
] | null | null | null |
"""Script for testing the performance of pickling/unpickling.
This will pickle/unpickle several real world-representative objects a few
thousand times. The methodology below was chosen for was chosen to be similar
to real-world scenarios which operate on single objects at a time. Note that if
we did something like
pickle.dumps([dict(some_dict) for _ in range(10000)])
this isn't equivalent to dumping the dict 10000 times: pickle uses a
highly-efficient encoding for the n-1 following copies.
"""
import datetime
import random
import sys
import pyperf
# True when running under PyPy, which ships no _pickle C accelerator.
IS_PYPY = (pyperf.python_implementation() == 'pypy')
__author__ = "collinwinter@google.com (Collin Winter)"
# Representative real-world user-profile record: the dict-pickling workload.
DICT = {
    'ads_flags': 0,
    'age': 18,
    'birthday': datetime.date(1980, 5, 7),
    'bulletin_count': 0,
    'comment_count': 0,
    'country': 'BR',
    'encrypted_id': 'G9urXXAJwjE',
    'favorite_count': 9,
    'first_name': '',
    'flags': 412317970704,
    'friend_count': 0,
    'gender': 'm',
    'gender_for_display': 'Male',
    'id': 302935349,
    'is_custom_profile_icon': 0,
    'last_name': '',
    'locale_preference': 'pt_BR',
    'member': 0,
    'tags': ['a', 'b', 'c', 'd', 'e', 'f', 'g'],
    'profile_foo_id': 827119638,
    'secure_encrypted_id': 'Z_xxx2dYx3t4YAdnmfgyKw',
    'session_number': 2,
    'signup_id': '201-19225-223',
    'status': 'A',
    'theme': 1,
    'time_created': 1225237014,
    'time_updated': 1233134493,
    'unread_message_count': 0,
    'user_group': '0',
    'username': 'collinwinter',
    'play_count': 9,
    'view_count': 7,
    'zip': ''}
# A (list-of-ints, int) pair: the tuple-pickling workload.
TUPLE = (
    [265867233, 265868503, 265252341, 265243910, 265879514,
     266219766, 266021701, 265843726, 265592821, 265246784,
     265853180, 45526486, 265463699, 265848143, 265863062,
     265392591, 265877490, 265823665, 265828884, 265753032], 60)
def mutate_dict(orig_dict, random_source):
    """Return a shallow copy of *orig_dict* with randomized values.

    One random number is drawn per entry (so the random stream is consumed
    identically regardless of key types); for int, bytes or str keys the
    value is replaced by that number converted to the key's own type.
    """
    mutated = dict(orig_dict)
    for key in list(mutated):
        randomized = random_source.random() * sys.maxsize
        if isinstance(key, (int, bytes, str)):
            mutated[key] = type(key)(randomized)
    return mutated
random_source = random.Random(5)  # Fixed seed.
# Three mutated copies of DICT; deterministic thanks to the fixed seed above.
DICT_GROUP = [mutate_dict(DICT, random_source) for _ in range(3)]
def bench_pickle(loops, pickle, options):
    """Time `loops` rounds of pickling DICT, TUPLE and DICT_GROUP.

    Each object is dumped 20 times per round; the calls are deliberately
    unrolled so loop bookkeeping does not skew the measurement (BENCHMARKS
    records the 20 inner loops for pyperf).  Returns elapsed seconds.
    """
    range_it = range(loops)
    # micro-optimization: use fast local variables
    dumps = pickle.dumps
    objs = (DICT, TUPLE, DICT_GROUP)
    protocol = options.protocol
    t0 = pyperf.perf_counter()
    for _ in range_it:
        for obj in objs:
            # 20 dumps
            dumps(obj, protocol)
            dumps(obj, protocol)
            dumps(obj, protocol)
            dumps(obj, protocol)
            dumps(obj, protocol)
            dumps(obj, protocol)
            dumps(obj, protocol)
            dumps(obj, protocol)
            dumps(obj, protocol)
            dumps(obj, protocol)
            dumps(obj, protocol)
            dumps(obj, protocol)
            dumps(obj, protocol)
            dumps(obj, protocol)
            dumps(obj, protocol)
            dumps(obj, protocol)
            dumps(obj, protocol)
            dumps(obj, protocol)
            dumps(obj, protocol)
            dumps(obj, protocol)
    return pyperf.perf_counter() - t0
def bench_unpickle(loops, pickle, options):
    """Time `loops` rounds of unpickling the three workload objects.

    Objects are pickled once up front; each payload is then loaded 20 times
    per round with deliberately unrolled calls (see BENCHMARKS inner loops).
    Returns elapsed seconds.
    """
    pickled_dict = pickle.dumps(DICT, options.protocol)
    pickled_tuple = pickle.dumps(TUPLE, options.protocol)
    pickled_dict_group = pickle.dumps(DICT_GROUP, options.protocol)
    range_it = range(loops)
    # micro-optimization: use fast local variables
    loads = pickle.loads
    objs = (pickled_dict, pickled_tuple, pickled_dict_group)
    t0 = pyperf.perf_counter()
    for _ in range_it:
        for obj in objs:
            # 20 loads dict
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
            loads(obj)
    return pyperf.perf_counter() - t0
# Nested-list workload: 10 pairs of 10-element lists.
LIST = [[list(range(10)), list(range(10))] for _ in range(10)]
def bench_pickle_list(loops, pickle, options):
    """Time `loops` rounds of pickling LIST (10 unrolled dumps per round).

    Returns elapsed seconds; BENCHMARKS records the 10 inner loops.
    """
    range_it = range(loops)
    # micro-optimization: use fast local variables
    dumps = pickle.dumps
    obj = LIST
    protocol = options.protocol
    t0 = pyperf.perf_counter()
    for _ in range_it:
        # 10 dumps list
        dumps(obj, protocol)
        dumps(obj, protocol)
        dumps(obj, protocol)
        dumps(obj, protocol)
        dumps(obj, protocol)
        dumps(obj, protocol)
        dumps(obj, protocol)
        dumps(obj, protocol)
        dumps(obj, protocol)
        dumps(obj, protocol)
    return pyperf.perf_counter() - t0
def bench_unpickle_list(loops, pickle, options):
    """Time `loops` rounds of unpickling LIST (10 unrolled loads per round).

    LIST is pickled once up front; returns elapsed seconds.
    """
    pickled_list = pickle.dumps(LIST, options.protocol)
    range_it = range(loops)
    # micro-optimization: use fast local variables
    loads = pickle.loads
    t0 = pyperf.perf_counter()
    for _ in range_it:
        # 10 loads list
        loads(pickled_list)
        loads(pickled_list)
        loads(pickled_list)
        loads(pickled_list)
        loads(pickled_list)
        loads(pickled_list)
        loads(pickled_list)
        loads(pickled_list)
        loads(pickled_list)
        loads(pickled_list)
    return pyperf.perf_counter() - t0
# Dict-of-dicts workload: 100 keys, each mapping to a 10-key dict of Nones.
MICRO_DICT = {key: dict.fromkeys(range(10)) for key in range(100)}
def bench_pickle_dict(loops, pickle, options):
    """Time `loops` rounds of pickling MICRO_DICT (5 unrolled dumps per round).

    Unlike the other benchmarks, `pickle.dumps` is looked up each call on
    purpose so attribute access is part of what is measured here.
    Returns elapsed seconds.
    """
    range_it = range(loops)
    # micro-optimization: use fast local variables
    protocol = options.protocol
    obj = MICRO_DICT
    t0 = pyperf.perf_counter()
    for _ in range_it:
        # 5 dumps dict
        pickle.dumps(obj, protocol)
        pickle.dumps(obj, protocol)
        pickle.dumps(obj, protocol)
        pickle.dumps(obj, protocol)
        pickle.dumps(obj, protocol)
    return pyperf.perf_counter() - t0
# Maps CLI benchmark name -> (benchmark function, unrolled inner-loop count).
# The inner-loop count tells pyperf how many operations one "loop" contains.
BENCHMARKS = {
    # 20 inner-loops: don't count the 3 pickled objects
    'pickle': (bench_pickle, 20),
    # 20 inner-loops: don't count the 3 unpickled objects
    'unpickle': (bench_unpickle, 20),
    'pickle_list': (bench_pickle_list, 10),
    'unpickle_list': (bench_unpickle_list, 10),
    'pickle_dict': (bench_pickle_dict, 5),
}
def is_module_accelerated(module):
    """Return True if *module*'s Pickler is the pure-Python implementation.

    Despite the name, the check is True when ``Pickler.__module__`` is
    ``'pickle'`` (pure Python) and False when the C accelerator supplies it
    (``'_pickle'``); the call sites in ``__main__`` rely on this polarity.
    """
    # Bug fix: inspect the module that was passed in rather than the global
    # 'pickle' name, which this function previously ignored (and which may
    # not even be imported when the helper is called).
    return getattr(module.Pickler, '__module__', '<jython>') == 'pickle'
def add_cmdline_args(cmd, args):
    """Mirror the parsed *args* back onto *cmd* (in place) so pyperf can
    re-invoke this script in worker processes with identical options."""
    if args.pure_python:
        cmd.append("--pure-python")
    cmd += ["--protocol", str(args.protocol), args.benchmark]
if __name__ == "__main__":
    runner = pyperf.Runner(add_cmdline_args=add_cmdline_args)
    runner.metadata['description'] = "Test the performance of pickling."
    parser = runner.argparser
    # Bug fix: the help text previously said "Use the C version of pickle.",
    # the opposite of what --pure-python does.
    parser.add_argument("--pure-python", action="store_true",
                        help="Use the pure-Python version of pickle "
                             "(disable the C accelerators).")
    parser.add_argument("--protocol", action="store", default=None, type=int,
                        help="Which protocol to use (default: highest protocol).")
    benchmarks = sorted(BENCHMARKS)
    parser.add_argument("benchmark", choices=benchmarks)
    options = runner.parse_args()
    benchmark, inner_loops = BENCHMARKS[options.benchmark]
    name = options.benchmark
    if options.pure_python:
        name += "_pure_python"
    if (options.pure_python or IS_PYPY):
        # Block the C accelerator, then import pickle so the pure-Python
        # implementation is picked up.  Bug fix: 'import pickle' previously
        # ran only after the is_module_accelerated() checks, raising
        # NameError before any benchmark could start.
        sys.modules['_pickle'] = None
        import pickle
        if not is_module_accelerated(pickle):
            raise RuntimeError("Unexpected C accelerators for pickle")
    else:
        # C accelerators are enabled by default on 3.x
        import pickle
        if is_module_accelerated(pickle):
            raise RuntimeError("Missing C accelerators for pickle")
    if options.protocol is None:
        options.protocol = pickle.HIGHEST_PROTOCOL
    runner.metadata['pickle_protocol'] = str(options.protocol)
    runner.metadata['pickle_module'] = pickle.__name__
    runner.bench_time_func(name, benchmark,
                           pickle, options, inner_loops=inner_loops)
| 28.587413
| 82
| 0.629036
|
939bd6e7476c81b3dc62c3e6627a94f14a0811d7
| 375
|
py
|
Python
|
backend/backend/accounts/migrations/0003_auto_20201116_0136.py
|
mightykim91/howaboutme
|
467c3a2eccc959084296bc7f4679e77b93b9d7f7
|
[
"Unlicense"
] | null | null | null |
backend/backend/accounts/migrations/0003_auto_20201116_0136.py
|
mightykim91/howaboutme
|
467c3a2eccc959084296bc7f4679e77b93b9d7f7
|
[
"Unlicense"
] | null | null | null |
backend/backend/accounts/migrations/0003_auto_20201116_0136.py
|
mightykim91/howaboutme
|
467c3a2eccc959084296bc7f4679e77b93b9d7f7
|
[
"Unlicense"
] | null | null | null |
# Generated by Django 3.1.2 on 2020-11-15 16:36
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: alter accounts.User.name to
    # CharField(max_length=150).  Do not edit the operations by hand;
    # Django compares them against the model state.
    dependencies = [
        ('accounts', '0002_user_like'),
    ]
    operations = [
        migrations.AlterField(
            model_name='user',
            name='name',
            field=models.CharField(max_length=150),
        ),
    ]
| 19.736842
| 51
| 0.586667
|
794d6436b61ab02e0d8f6de8d32f421a23a8a200
| 11,749
|
py
|
Python
|
Lib/test/test_py_compile.py
|
oleksandr-pavlyk/cpython
|
eb002dbe0da9622245a355db5f0cd5aa2fc70b40
|
[
"0BSD"
] | 5
|
2021-12-03T23:11:53.000Z
|
2022-01-08T21:02:50.000Z
|
Lib/test/test_py_compile.py
|
dalakatt/cpython
|
2f49b97cc5426087b46515254b9a97a22ee8c807
|
[
"0BSD"
] | 8
|
2022-01-07T11:31:11.000Z
|
2022-03-04T00:07:16.000Z
|
Lib/test/test_py_compile.py
|
dalakatt/cpython
|
2f49b97cc5426087b46515254b9a97a22ee8c807
|
[
"0BSD"
] | 3
|
2017-10-18T09:35:14.000Z
|
2018-09-09T16:40:13.000Z
|
import functools
import importlib.util
import os
import py_compile
import shutil
import stat
import subprocess
import sys
import tempfile
import unittest
from test import support
from test.support import os_helper, script_helper
def without_source_date_epoch(fxn):
"""Runs function with SOURCE_DATE_EPOCH unset."""
@functools.wraps(fxn)
def wrapper(*args, **kwargs):
with os_helper.EnvironmentVarGuard() as env:
env.unset('SOURCE_DATE_EPOCH')
return fxn(*args, **kwargs)
return wrapper
def with_source_date_epoch(fxn):
"""Runs function with SOURCE_DATE_EPOCH set."""
@functools.wraps(fxn)
def wrapper(*args, **kwargs):
with os_helper.EnvironmentVarGuard() as env:
env['SOURCE_DATE_EPOCH'] = '123456789'
return fxn(*args, **kwargs)
return wrapper
# Run tests with SOURCE_DATE_EPOCH set or unset explicitly.
class SourceDateEpochTestMeta(type(unittest.TestCase)):
    """Metaclass that wraps every ``test_*`` method so it runs with
    SOURCE_DATE_EPOCH either forced set or forced unset.

    The keyword-only class argument ``source_date_epoch`` selects which
    decorator is applied (see the two class definitions further down).
    """
    def __new__(mcls, name, bases, dct, *, source_date_epoch):
        cls = super().__new__(mcls, name, bases, dct)
        # dir() also finds test methods inherited from base classes, so the
        # shared PyCompileTestsBase methods get wrapped too.
        for attr in dir(cls):
            if attr.startswith('test_'):
                meth = getattr(cls, attr)
                if source_date_epoch:
                    wrapper = with_source_date_epoch(meth)
                else:
                    wrapper = without_source_date_epoch(meth)
                setattr(cls, attr, wrapper)
        return cls
class PyCompileTestsBase:
    """Shared py_compile.compile() tests.

    Mixed into concrete TestCase subclasses below, where the metaclass runs
    each test with SOURCE_DATE_EPOCH explicitly set or unset.
    """
    def setUp(self):
        # Fresh working directory per test; derive the source file, the
        # legacy in-place pyc path ('_test.pyc') and the PEP 3147
        # __pycache__ path from it.
        self.directory = tempfile.mkdtemp(dir=os.getcwd())
        self.source_path = os.path.join(self.directory, '_test.py')
        self.pyc_path = self.source_path + 'c'
        self.cache_path = importlib.util.cache_from_source(self.source_path)
        self.cwd_drive = os.path.splitdrive(os.getcwd())[0]
        # In these tests we compute relative paths. When using Windows, the
        # current working directory path and the 'self.source_path' might be
        # on different drives. Therefore we need to switch to the drive where
        # the temporary source file lives.
        drive = os.path.splitdrive(self.source_path)[0]
        if drive:
            os.chdir(drive)
        with open(self.source_path, 'w') as file:
            file.write('x = 123\n')
    def tearDown(self):
        shutil.rmtree(self.directory)
        if self.cwd_drive:
            os.chdir(self.cwd_drive)
    def test_absolute_path(self):
        # An explicit cfile bypasses the __pycache__ layout entirely.
        py_compile.compile(self.source_path, self.pyc_path)
        self.assertTrue(os.path.exists(self.pyc_path))
        self.assertFalse(os.path.exists(self.cache_path))
    def test_do_not_overwrite_symlinks(self):
        # In the face of a cfile argument being a symlink, bail out.
        # Issue #17222
        try:
            os.symlink(self.pyc_path + '.actual', self.pyc_path)
        except (NotImplementedError, OSError):
            self.skipTest('need to be able to create a symlink for a file')
        else:
            assert os.path.islink(self.pyc_path)
            with self.assertRaises(FileExistsError):
                py_compile.compile(self.source_path, self.pyc_path)
    @unittest.skipIf(not os.path.exists(os.devnull) or os.path.isfile(os.devnull),
                     'requires os.devnull and for it to be a non-regular file')
    def test_do_not_overwrite_nonregular_files(self):
        # In the face of a cfile argument being a non-regular file, bail out.
        # Issue #17222
        with self.assertRaises(FileExistsError):
            py_compile.compile(self.source_path, os.devnull)
    def test_cache_path(self):
        # Without cfile, output lands in the __pycache__ location.
        py_compile.compile(self.source_path)
        self.assertTrue(os.path.exists(self.cache_path))
    def test_cwd(self):
        # Compiling with paths relative to the source's own directory.
        with os_helper.change_cwd(self.directory):
            py_compile.compile(os.path.basename(self.source_path),
                               os.path.basename(self.pyc_path))
        self.assertTrue(os.path.exists(self.pyc_path))
        self.assertFalse(os.path.exists(self.cache_path))
    def test_relative_path(self):
        py_compile.compile(os.path.relpath(self.source_path),
                           os.path.relpath(self.pyc_path))
        self.assertTrue(os.path.exists(self.pyc_path))
        self.assertFalse(os.path.exists(self.cache_path))
    @unittest.skipIf(hasattr(os, 'geteuid') and os.geteuid() == 0,
                     'non-root user required')
    @unittest.skipIf(os.name == 'nt',
                     'cannot control directory permissions on Windows')
    def test_exceptions_propagate(self):
        # Make sure that exceptions raised thanks to issues with writing
        # bytecode.
        # http://bugs.python.org/issue17244
        mode = os.stat(self.directory)
        os.chmod(self.directory, stat.S_IREAD)
        try:
            with self.assertRaises(IOError):
                py_compile.compile(self.source_path, self.pyc_path)
        finally:
            # Restore permissions so tearDown can remove the directory.
            os.chmod(self.directory, mode.st_mode)
    def test_bad_coding(self):
        # A bogus coding declaration yields None (no raise) and no bytecode.
        bad_coding = os.path.join(os.path.dirname(__file__), 'bad_coding2.py')
        with support.captured_stderr():
            self.assertIsNone(py_compile.compile(bad_coding, doraise=False))
        self.assertFalse(os.path.exists(
            importlib.util.cache_from_source(bad_coding)))
    def test_source_date_epoch(self):
        # SOURCE_DATE_EPOCH selects checked-hash pycs (flags 0b11) instead
        # of timestamp-based ones (flags 0b00).
        py_compile.compile(self.source_path, self.pyc_path)
        self.assertTrue(os.path.exists(self.pyc_path))
        self.assertFalse(os.path.exists(self.cache_path))
        with open(self.pyc_path, 'rb') as fp:
            flags = importlib._bootstrap_external._classify_pyc(
                fp.read(), 'test', {})
        if os.environ.get('SOURCE_DATE_EPOCH'):
            expected_flags = 0b11
        else:
            expected_flags = 0b00
        self.assertEqual(flags, expected_flags)
    @unittest.skipIf(sys.flags.optimize > 0, 'test does not work with -O')
    def test_double_dot_no_clobber(self):
        # http://bugs.python.org/issue22966
        # py_compile foo.bar.py -> __pycache__/foo.cpython-34.pyc
        weird_path = os.path.join(self.directory, 'foo.bar.py')
        cache_path = importlib.util.cache_from_source(weird_path)
        pyc_path = weird_path + 'c'
        head, tail = os.path.split(cache_path)
        penultimate_tail = os.path.basename(head)
        self.assertEqual(
            os.path.join(penultimate_tail, tail),
            os.path.join(
                '__pycache__',
                'foo.bar.{}.pyc'.format(sys.implementation.cache_tag)))
        with open(weird_path, 'w') as file:
            file.write('x = 123\n')
        py_compile.compile(weird_path)
        self.assertTrue(os.path.exists(cache_path))
        self.assertFalse(os.path.exists(pyc_path))
    def test_optimization_path(self):
        # Specifying optimized bytecode should lead to a path reflecting that.
        self.assertIn('opt-2', py_compile.compile(self.source_path, optimize=2))
    def test_invalidation_mode(self):
        # CHECKED_HASH -> flags 0b11; UNCHECKED_HASH -> flags 0b01.
        py_compile.compile(
            self.source_path,
            invalidation_mode=py_compile.PycInvalidationMode.CHECKED_HASH,
        )
        with open(self.cache_path, 'rb') as fp:
            flags = importlib._bootstrap_external._classify_pyc(
                fp.read(), 'test', {})
        self.assertEqual(flags, 0b11)
        py_compile.compile(
            self.source_path,
            invalidation_mode=py_compile.PycInvalidationMode.UNCHECKED_HASH,
        )
        with open(self.cache_path, 'rb') as fp:
            flags = importlib._bootstrap_external._classify_pyc(
                fp.read(), 'test', {})
        self.assertEqual(flags, 0b1)
    def test_quiet(self):
        # quiet=2 suppresses stderr output entirely; doraise still raises.
        bad_coding = os.path.join(os.path.dirname(__file__), 'bad_coding2.py')
        with support.captured_stderr() as stderr:
            self.assertIsNone(py_compile.compile(bad_coding, doraise=False, quiet=2))
            self.assertIsNone(py_compile.compile(bad_coding, doraise=True, quiet=2))
            self.assertEqual(stderr.getvalue(), '')
            with self.assertRaises(py_compile.PyCompileError):
                py_compile.compile(bad_coding, doraise=True, quiet=1)
class PyCompileTestsWithSourceEpoch(PyCompileTestsBase,
                                    unittest.TestCase,
                                    metaclass=SourceDateEpochTestMeta,
                                    source_date_epoch=True):
    """Run the shared tests with SOURCE_DATE_EPOCH forced set."""
    pass
class PyCompileTestsWithoutSourceEpoch(PyCompileTestsBase,
                                       unittest.TestCase,
                                       metaclass=SourceDateEpochTestMeta,
                                       source_date_epoch=False):
    """Run the shared tests with SOURCE_DATE_EPOCH forced unset."""
    pass
class PyCompileCLITestCase(unittest.TestCase):
    """Tests for the ``python -m py_compile`` command-line interface."""
    def setUp(self):
        self.directory = tempfile.mkdtemp()
        self.source_path = os.path.join(self.directory, '_test.py')
        self.cache_path = importlib.util.cache_from_source(self.source_path)
        with open(self.source_path, 'w') as file:
            file.write('x = 123\n')
    def tearDown(self):
        os_helper.rmtree(self.directory)
    @support.requires_subprocess()
    def pycompilecmd(self, *args, **kwargs):
        # assert_python_* helpers don't return proc object. We'll just use
        # subprocess.run() instead of spawn_python() and its friends to test
        # stdin support of the CLI.
        if args and args[0] == '-' and 'input' in kwargs:
            return subprocess.run([sys.executable, '-m', 'py_compile', '-'],
                                  input=kwargs['input'].encode(),
                                  capture_output=True)
        return script_helper.assert_python_ok('-m', 'py_compile', *args, **kwargs)
    def pycompilecmd_failure(self, *args):
        # Helper: run the CLI expecting a non-zero exit status.
        return script_helper.assert_python_failure('-m', 'py_compile', *args)
    def test_stdin(self):
        # '-' makes the CLI read filenames to compile from stdin.
        result = self.pycompilecmd('-', input=self.source_path)
        self.assertEqual(result.returncode, 0)
        self.assertEqual(result.stdout, b'')
        self.assertEqual(result.stderr, b'')
        self.assertTrue(os.path.exists(self.cache_path))
    def test_with_files(self):
        # The same file may be listed more than once without error.
        rc, stdout, stderr = self.pycompilecmd(self.source_path, self.source_path)
        self.assertEqual(rc, 0)
        self.assertEqual(stdout, b'')
        self.assertEqual(stderr, b'')
        self.assertTrue(os.path.exists(self.cache_path))
    def test_bad_syntax(self):
        bad_syntax = os.path.join(os.path.dirname(__file__), 'badsyntax_3131.py')
        rc, stdout, stderr = self.pycompilecmd_failure(bad_syntax)
        self.assertEqual(rc, 1)
        self.assertEqual(stdout, b'')
        self.assertIn(b'SyntaxError', stderr)
    def test_bad_syntax_with_quiet(self):
        # -q suppresses the SyntaxError report but keeps the exit status.
        bad_syntax = os.path.join(os.path.dirname(__file__), 'badsyntax_3131.py')
        rc, stdout, stderr = self.pycompilecmd_failure('-q', bad_syntax)
        self.assertEqual(rc, 1)
        self.assertEqual(stdout, b'')
        self.assertEqual(stderr, b'')
    def test_file_not_exists(self):
        should_not_exists = os.path.join(os.path.dirname(__file__), 'should_not_exists.py')
        rc, stdout, stderr = self.pycompilecmd_failure(self.source_path, should_not_exists)
        self.assertEqual(rc, 1)
        self.assertEqual(stdout, b'')
        self.assertIn(b'no such file or directory', stderr.lower())
    def test_file_not_exists_with_quiet(self):
        should_not_exists = os.path.join(os.path.dirname(__file__), 'should_not_exists.py')
        rc, stdout, stderr = self.pycompilecmd_failure('-q', self.source_path, should_not_exists)
        self.assertEqual(rc, 1)
        self.assertEqual(stdout, b'')
        self.assertEqual(stderr, b'')
if __name__ == "__main__":
    # Run the whole test module when executed directly.
    unittest.main()
| 40.236301
| 97
| 0.637671
|
b60f40d1e1c9e034584ea409c44e9b084d421363
| 7,085
|
py
|
Python
|
test/integration/test_remote_files.py
|
fubar2/galaxy
|
2d363ea6a374d9339ed1eb55b5565f9bba3fcab1
|
[
"CC-BY-3.0"
] | null | null | null |
test/integration/test_remote_files.py
|
fubar2/galaxy
|
2d363ea6a374d9339ed1eb55b5565f9bba3fcab1
|
[
"CC-BY-3.0"
] | 2
|
2020-08-19T18:14:59.000Z
|
2020-08-20T01:19:12.000Z
|
test/integration/test_remote_files.py
|
CloudVE/galaxy
|
002fac90618529c53c11ec846566ca438a7e02cf
|
[
"CC-BY-3.0"
] | null | null | null |
import json
import operator
import os
import shutil
from tempfile import mkdtemp
from galaxy.exceptions import error_codes
from galaxy_test.base.api_asserts import assert_error_code_is, assert_error_message_contains
from galaxy_test.base.populators import DatasetPopulator
from galaxy_test.driver import integration_util
SCRIPT_DIRECTORY = os.path.abspath(os.path.dirname(__file__))
FILE_SOURCES_JOB_CONF = os.path.join(SCRIPT_DIRECTORY, "file_sources_conf.yml")
# Test-account identifiers; USERNAME is the filesystem-safe form of the email.
USERNAME = 'user--bx--psu--edu'
USER_EMAIL = 'user@bx.psu.edu'
class RemoteFilesIntegrationTestCase(integration_util.IntegrationTestCase):
    """Integration tests for the remote_files API (importdir/userdir/ftp)."""
    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        # Point the three remote-file roots at fresh temp directories that
        # the test driver will clean up.
        root = os.path.realpath(mkdtemp())
        cls._test_driver.temp_directories.append(root)
        cls.root = root
        cls.library_dir = os.path.join(root, "library")
        cls.user_library_dir = os.path.join(root, "user_library")
        cls.ftp_upload_dir = os.path.join(root, "ftp")
        config["library_import_dir"] = cls.library_dir
        config["user_library_import_dir"] = cls.user_library_dir
        config["ftp_upload_dir"] = cls.ftp_upload_dir
        config["ftp_upload_site"] = "ftp://cow.com"
        # driver_util sets this to False, though the Galaxy default is True.
        # Restore default for these tests.
        config["ftp_upload_purge"] = True
    def setUp(self):
        super(RemoteFilesIntegrationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        # Start each test from empty directories.
        for d in [self.library_dir, self.user_library_dir, self.ftp_upload_dir]:
            if os.path.exists(d):
                shutil.rmtree(d)
            os.mkdir(d)
    def test_index(self):
        """Listing importdir/userdir before and after fixtures are written."""
        index = self.galaxy_interactor.get("remote_files?target=importdir").json()
        self._assert_index_empty(index)
        _write_file_fixtures(self.root, self.library_dir)
        index = self.galaxy_interactor.get("remote_files?target=importdir").json()
        self._assert_index_matches_fixtures(index)
        # Get a 404 if the directory doesn't exist.
        index = self.galaxy_interactor.get("remote_files?target=userdir").json()
        assert_error_code_is(index, error_codes.USER_OBJECT_NOT_FOUND)
        users_dir = os.path.join(self.user_library_dir, USER_EMAIL)
        os.mkdir(users_dir)
        index = self.galaxy_interactor.get("remote_files?target=userdir").json()
        self._assert_index_empty(index)
        _write_file_fixtures(self.root, users_dir)
        index = self.galaxy_interactor.get("remote_files?target=userdir").json()
        self._assert_index_matches_fixtures(index)
        index = self.galaxy_interactor.get("remote_files?target=userdir&format=jstree").json()
        self._assert_index_matches_fixtures_jstree(index)
    def test_fetch_from_import(self):
        """gximport:// fetch copies the file: the source must remain."""
        _write_file_fixtures(self.root, self.library_dir)
        with self.dataset_populator.test_history() as history_id:
            element = dict(src="url", url="gximport://a")
            target = {
                "destination": {"type": "hdas"},
                "elements": [element],
            }
            targets = json.dumps([target])
            payload = {
                "history_id": history_id,
                "targets": targets,
            }
            new_dataset = self.dataset_populator.fetch(payload, assert_ok=True).json()["outputs"][0]
            content = self.dataset_populator.get_history_dataset_content(history_id, dataset=new_dataset)
            assert content == "a\n", content
            # The import directory is read-only: the source stays in place.
            assert os.path.exists(os.path.join(self.library_dir, "a"))
    def test_fetch_from_ftp(self):
        """gxftp:// fetch purges the source (ftp_upload_purge=True)."""
        ftp_dir = os.path.join(self.ftp_upload_dir, USER_EMAIL)
        _write_file_fixtures(self.root, ftp_dir)
        with self.dataset_populator.test_history() as history_id:
            element = dict(src="url", url="gxftp://a")
            target = {
                "destination": {"type": "hdas"},
                "elements": [element],
            }
            targets = json.dumps([target])
            payload = {
                "history_id": history_id,
                "targets": targets,
            }
            new_dataset = self.dataset_populator.fetch(payload, assert_ok=True).json()["outputs"][0]
            content = self.dataset_populator.get_history_dataset_content(history_id, dataset=new_dataset)
            assert content == "a\n", content
            # ftp_upload_purge removes the source after a successful fetch.
            assert not os.path.exists(os.path.join(ftp_dir, "a"))
    def _assert_index_empty(self, index):
        assert len(index) == 0
    def _assert_index_matches_fixtures(self, index):
        # Flat format: entries carry a relative "path" key.
        paths = map(operator.itemgetter("path"), index)
        assert "a" in paths
        assert "subdir1/c" in paths
    def _assert_index_matches_fixtures_jstree(self, index):
        # jstree format: nested nodes with li_attr metadata.
        a_file = index[0]
        assert a_file["li_attr"]["full_path"] == "a"
        subdir1 = index[1]
        assert subdir1["type"] == "folder"
        assert subdir1["state"]["disabled"]
        assert subdir1["li_attr"]["full_path"] == "subdir1"
        subdir1_children = subdir1["children"]
        assert len(subdir1_children) == 2
        c = subdir1_children[0]
        assert c["li_attr"]["full_path"] == "subdir1/c"
class RemoteFilesNotConfiguredIntegrationTestCase(integration_util.IntegrationTestCase):
    """Error responses when none of the remote-file sources are configured."""
    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        # Explicitly disable every remote-file source.
        config["library_import_dir"] = None
        config["user_library_import_dir"] = None
        config["ftp_upload_dir"] = None
    def test_configuration_statuses(self):
        importfiles = self.galaxy_interactor.get("remote_files?target=importdir")
        assert_error_code_is(importfiles, error_codes.CONFIG_DOES_NOT_ALLOW)
        assert_error_message_contains(importfiles, 'import directory')
        importfiles = self.galaxy_interactor.get("remote_files?target=ftpdir")
        assert_error_code_is(importfiles, error_codes.CONFIG_DOES_NOT_ALLOW)
        assert_error_message_contains(importfiles, 'FTP directories')
        importfiles = self.galaxy_interactor.get("remote_files?target=userdir")
        assert_error_code_is(importfiles, error_codes.CONFIG_DOES_NOT_ALLOW)
        assert_error_message_contains(importfiles, 'user directories')
        # invalid request parameter waitwhat...
        importfiles = self.galaxy_interactor.get("remote_files?target=waitwhat")
        assert_error_code_is(importfiles, error_codes.USER_REQUEST_INVALID_PARAMETER)
def _write_file_fixtures(tmp, root):
if not os.path.exists(root):
os.mkdir(root)
os.symlink(os.path.join(tmp, "b"), os.path.join(root, "unsafe"))
with open(os.path.join(root, "a"), "w") as f:
f.write("a\n")
with open(os.path.join(tmp, "b"), "w") as f:
f.write("b\n")
subdir1 = os.path.join(root, "subdir1")
os.mkdir(subdir1)
with open(os.path.join(subdir1, "c"), "w") as f:
f.write("c\n")
subdir2 = os.path.join(subdir1, "subdir2")
os.mkdir(subdir2)
with open(os.path.join(subdir2, "d"), "w") as f:
f.write("d\n")
return tmp, root
| 39.581006
| 105
| 0.668313
|
de126ca073811e55758c3de646079c009a0af5ef
| 3,068
|
py
|
Python
|
setup.py
|
rserran/FLAML
|
7d6822aa40883550e72c4ee24adb765c6e937ce7
|
[
"MIT"
] | null | null | null |
setup.py
|
rserran/FLAML
|
7d6822aa40883550e72c4ee24adb765c6e937ce7
|
[
"MIT"
] | null | null | null |
setup.py
|
rserran/FLAML
|
7d6822aa40883550e72c4ee24adb765c6e937ce7
|
[
"MIT"
] | null | null | null |
import setuptools
import os
here = os.path.abspath(os.path.dirname(__file__))
# Long description for PyPI comes straight from the README.
with open("README.md", "r", encoding="UTF-8") as fh:
    long_description = fh.read()
# Get the code version
version = {}
with open(os.path.join(here, "flaml/version.py")) as fp:
    exec(fp.read(), version)
__version__ = version["__version__"]
# Core runtime dependencies; everything else is opt-in via extras_require.
install_requires = [
    "NumPy>=1.17.0rc1",
    "lightgbm>=2.3.1",
    "xgboost>=0.90",
    "scipy>=1.4.1",
    "pandas>=1.1.4",
    "scikit-learn>=0.24",
]
setuptools.setup(
    name="FLAML",
    version=__version__,
    author="Microsoft Corporation",
    author_email="hpo@microsoft.com",
    description="A fast library for automated machine learning and tuning",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/microsoft/FLAML",
    packages=setuptools.find_packages(include=["flaml*"]),
    package_data={
        "flaml.default": ["*/*.json"],
    },
    include_package_data=True,
    install_requires=install_requires,
    # Optional feature sets; install e.g. `pip install flaml[notebook]`.
    extras_require={
        "notebook": [
            "openml==0.10.2",
            "jupyter",
            "matplotlib",
            "rgf-python",
            "catboost>=0.26",
        ],
        "test": [
            "flake8>=3.8.4",
            "pytest>=6.1.1",
            "coverage>=5.3",
            "pre-commit",
            "catboost>=0.26",
            "rgf-python",
            "optuna==2.8.0",
            "vowpalwabbit",
            "openml",
            "statsmodels>=0.12.2",
            "psutil==5.8.0",
            "dataclasses",
            "transformers>=4.14",
            "datasets",
            "torch",
            "nltk",
            "rouge_score",
            "hcrystalball==0.1.10",
            "seqeval",
            "protobuf<4",  # to prevent TypeError in ray
        ],
        "catboost": ["catboost>=0.26"],
        "blendsearch": ["optuna==2.8.0"],
        "ray": [
            "ray[tune]~=1.10",
            "protobuf<4",  # to prevent TypeError in ray
        ],
        "azureml": [
            "azureml-mlflow",
        ],
        "nni": [
            "nni",
        ],
        "vw": [
            "vowpalwabbit",
        ],
        "nlp": [
            "transformers>=4.14",
            "datasets",
            "torch",
            "seqeval",
            "nltk",
            "rouge_score",
        ],
        # "ts_forecast" and "forecast" are intentionally identical aliases.
        "ts_forecast": [
            "holidays<0.14",  # to prevent installation error for prophet
            "prophet>=1.0.1",
            "statsmodels>=0.12.2",
            "hcrystalball==0.1.10",
        ],
        "forecast": [
            "holidays<0.14",  # to prevent installation error for prophet
            "prophet>=1.0.1",
            "statsmodels>=0.12.2",
            "hcrystalball==0.1.10",
        ],
        "benchmark": ["catboost>=0.26", "psutil==5.8.0", "xgboost==1.3.3"],
    },
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires=">=3.7",
)
| 26.678261
| 75
| 0.489896
|
1ad8e8179bdd27c92a847bee4e813ea9fa54a387
| 740
|
py
|
Python
|
yodatools/dataloader/controller/WizardDatabasePageController.py
|
ODM2/YODAParser
|
274a1fc5ed1810bc748a4ab108855254f8b9fc46
|
[
"BSD-3-Clause"
] | null | null | null |
yodatools/dataloader/controller/WizardDatabasePageController.py
|
ODM2/YODAParser
|
274a1fc5ed1810bc748a4ab108855254f8b9fc46
|
[
"BSD-3-Clause"
] | 21
|
2016-02-06T00:43:44.000Z
|
2018-02-02T20:22:05.000Z
|
yodatools/dataloader/controller/WizardDatabasePageController.py
|
ODM2/ODM2YODAParser
|
274a1fc5ed1810bc748a4ab108855254f8b9fc46
|
[
"BSD-3-Clause"
] | 1
|
2017-07-06T18:42:22.000Z
|
2017-07-06T18:42:22.000Z
|
from yodatools.dataloader.view.WizardDatabasePageView import WizardDatabasePageView
import os
class WizardDatabasePageController(WizardDatabasePageView):
    """Wizard page for database connection details, pre-filled from the
    DB_ENGINE/DB_HOST/DB_USER/DB_NAME/DB_PWORD environment variables."""
    def __init__(self, parent, title=''):
        super(WizardDatabasePageController, self).__init__(parent)
        # SQLite is file-based and needs no server credentials, so it is
        # removed from the selectable engines on this page.
        del self.panel.choices['SQLite']
        # NOTE(review): on Python 3 `.keys()` is a dict_keys view, not a
        # list -- confirm the wx SetItems binding accepts it.
        self.panel.cbDatabaseType.SetItems(self.panel.choices.keys())
        self.panel.cbDatabaseType.SetStringSelection(os.getenv('DB_ENGINE', ''))
        self.panel.txtServer.SetValue(os.getenv('DB_HOST', ''))
        self.panel.txtUser.SetValue(os.getenv('DB_USER', ''))
        self.panel.txtDBName.SetValue(os.getenv('DB_NAME', ''))
        self.panel.txtPass.SetValue(os.getenv('DB_PWORD', ''))
        self.title = title
| 38.947368
| 83
| 0.710811
|
38e4c2c20a76b2cf6a9f2ff52cc59e63d7338f12
| 2,438
|
py
|
Python
|
views/config.py
|
dev-easyshares/mighty
|
a6cf473fb8cfbf5b92db68c7b068fc8ae2911b8b
|
[
"MIT"
] | null | null | null |
views/config.py
|
dev-easyshares/mighty
|
a6cf473fb8cfbf5b92db68c7b068fc8ae2911b8b
|
[
"MIT"
] | 1
|
2022-03-12T00:57:37.000Z
|
2022-03-12T00:57:37.000Z
|
views/config.py
|
dev-easyshares/mighty
|
a6cf473fb8cfbf5b92db68c7b068fc8ae2911b8b
|
[
"MIT"
] | null | null | null |
from django.core.exceptions import ObjectDoesNotExist
from django.http import JsonResponse, Http404
from mighty.functions import setting
from mighty.views.template import TemplateView
from mighty.views.crud import ListView, DetailView
from mighty.apps import MightyConfig as conf
from mighty.models import ConfigClient, ConfigSimple
from mighty.applications.twofactor.apps import TwofactorConfig
from mighty.applications.nationality.apps import NationalityConfig
from mighty.applications.user import get_form_fields
# Static configuration served by the views below: computed once at import
# time from the app configs, then overlaid with the optional BASE_CONFIG
# Django setting.
base_config = {
    'base': {
        'logo': conf.logo,
        'email': TwofactorConfig.method.email,
        'sms': TwofactorConfig.method.sms,
        'basic': TwofactorConfig.method.basic,
        'languages': NationalityConfig.availables,
        'fields': get_form_fields(),
    }}
base_config.update(setting('BASE_CONFIG', {}))
# Return the base config of mighty
class Config(TemplateView):
    """Expose the static ``base_config`` mapping as a JSON response."""
    def get_config(self):
        # Hook point: subclasses may override to supply a different mapping.
        return base_config
    def get_context_data(self, **kwargs):
        # The JSON payload is exactly the configuration mapping.
        return self.get_config()
    def render_to_response(self, context, **response_kwargs):
        # Serialize the context as JSON instead of rendering a template.
        return JsonResponse(context, **response_kwargs)
# Return all configs in model ConfigClient
class ConfigListView(ListView):
    """Return every enabled config (client and simple) merged over the
    base configuration, keyed by each config's ``url_name``."""
    model = ConfigClient
    def get_queryset(self):
        # Both config models contribute; disabled entries are excluded.
        return [ConfigClient.objects.filter(is_disable=False), ConfigSimple.objects.filter(is_disable=False)]
    def render_to_response(self, context):
        # Bug fix: work on a copy.  The previous `cfg = base_config` aliased
        # the module-level dict, so per-request keys ("phones", url_name
        # entries) accumulated in base_config and leaked into every later
        # request served by this process.
        cfg = dict(base_config)
        if 'mighty.applications.nationality' in setting('INSTALLED_APPS'):
            from mighty.applications.nationality import conf_prefix_numbering
            cfg.update({"phones": conf_prefix_numbering()})
        for cfgs in context['object_list']:
            # Renamed the comprehension variable: the original shadowed `cfg`.
            cfg.update({entry.url_name: entry.config for entry in cfgs})
        return JsonResponse(cfg)
# Return a named Config
class ConfigDetailView(DetailView):
    """JSON detail view for one stored config, looked up by ``url_name``."""
    model = ConfigClient

    def get_config(self):
        """Fetch the config, preferring ConfigClient over ConfigSimple."""
        url_name = self.kwargs.get('name')
        try:
            return ConfigClient.objects.get(url_name=url_name)
        except ConfigClient.DoesNotExist:
            return ConfigSimple.objects.get(url_name=url_name)

    def get_object(self, queryset=None):
        """Translate any missing-object error into an HTTP 404."""
        try:
            return self.get_config()
        except ObjectDoesNotExist:
            raise Http404

    def render_to_response(self, context):
        found = self.get_object()
        # NOTE(review): the list view keys its payload on url_name while this
        # view keys on name — presumably intentional, but worth confirming.
        return JsonResponse({found.name: found.config})
| 35.333333
| 109
| 0.7137
|
83fe275186397f48c02c3fe1d98cbee2351c1b36
| 957
|
py
|
Python
|
NU-CS5001/lab02/weather.py
|
zahraaliaghazadeh/python
|
2f2d0141a916c99e8724f803bd4e5c7246a7a02e
|
[
"MIT"
] | null | null | null |
NU-CS5001/lab02/weather.py
|
zahraaliaghazadeh/python
|
2f2d0141a916c99e8724f803bd4e5c7246a7a02e
|
[
"MIT"
] | null | null | null |
NU-CS5001/lab02/weather.py
|
zahraaliaghazadeh/python
|
2f2d0141a916c99e8724f803bd4e5c7246a7a02e
|
[
"MIT"
] | null | null | null |
# Note date now is 9/21/2021 , and the location Seattle WA
# is used to answer the questions
# What is the difference between the highest and the lowest temperature values
# predicted for the 10 day forecast?
highest = 73
lowest = 55
print("The difference of highest and lowest temp predicted for the 10 day forecast is {}"
.format(highest-lowest))
print("-"*50)
# What is the average temperature at noon predicted for the 10 day
# forecast?
data1 = [61, 63, 62, 61, 59, 60, 64, 65, 65, 63]
average = sum(data1)/10
print("Average temoerature at noon predicted for the 10 day forecast is: {} F"
.format(average))
print()
print("-"*50)
# What is the highest temperature predicted for the 10 day forecast,
# converted from Fahrenheit to Celsius?
data2 = [69, 64, 62, 63, 62, 68, 70, 73, 72, 77]
high = max(data2)
high_celcius = (high - 32) * (5/9)
print("The highest temperature predicted for the next 10 days forecast is: {}"
.format(high_celcius))
| 29.90625
| 89
| 0.716823
|
d37940c738af61cb5ab647d0dbd43502f76204e7
| 1,553
|
py
|
Python
|
config/urls.py
|
caseydm/militaryhomes
|
25dd2a2d1f85bec5c9200e0961e8a2aacd82fd03
|
[
"MIT"
] | null | null | null |
config/urls.py
|
caseydm/militaryhomes
|
25dd2a2d1f85bec5c9200e0961e8a2aacd82fd03
|
[
"MIT"
] | null | null | null |
config/urls.py
|
caseydm/militaryhomes
|
25dd2a2d1f85bec5c9200e0961e8a2aacd82fd03
|
[
"MIT"
] | null | null | null |
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from django.views import defaults as default_views
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='pages/home.html'), name='home'),
url(r'^about/$', TemplateView.as_view(template_name='pages/about.html'), name='about'),
# Django Admin, use {% url 'admin:index' %}
url(settings.ADMIN_URL, admin.site.urls),
# User management
url(r'^users/', include('militaryhomes.users.urls', namespace='users')),
url(r'^accounts/', include('allauth.urls')),
# Your stuff: custom urls includes go here
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),
url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),
url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),
url(r'^500/$', default_views.server_error),
]
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns = [
url(r'^__debug__/', include(debug_toolbar.urls)),
] + urlpatterns
| 40.868421
| 110
| 0.696072
|
a1e6521c8c7f9486d911e64cb5fe7ec77d24ccd8
| 695
|
py
|
Python
|
NEMbox/logger.py
|
hyskyder/musicbox
|
ec06a49cc59c683f7f5e69fad5097c34a8a7984c
|
[
"MIT"
] | null | null | null |
NEMbox/logger.py
|
hyskyder/musicbox
|
ec06a49cc59c683f7f5e69fad5097c34a8a7984c
|
[
"MIT"
] | null | null | null |
NEMbox/logger.py
|
hyskyder/musicbox
|
ec06a49cc59c683f7f5e69fad5097c34a8a7984c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: omi
# @Date: 2014-08-24 21:51:57
from __future__ import (
print_function, unicode_literals, division, absolute_import
)
import logging
from future.builtins import open
from . import const
FILE_NAME = const.Constant.log_path
with open(FILE_NAME, 'a+') as f:
f.write('#' * 80)
f.write('\n')
def getLogger(name):
log = logging.getLogger(name)
log.setLevel(logging.INFO)
# File output handler
fh = logging.FileHandler(FILE_NAME)
fh.setLevel(logging.DEBUG)
fh.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(name)s:%(lineno)s: %(message)s'))
log.addHandler(fh)
return log
| 21.060606
| 104
| 0.673381
|
a5799dd45d794b94ea3a9db12f3ea6ae19c07f11
| 1,130
|
py
|
Python
|
amazon_asin_fetcher/amazon_asin_fetcher/spiders/asin_spider.py
|
turboLJY/amazon-reviews-scrapy
|
31278183a1530ea1a8f2e27f3d85dfbd4848354c
|
[
"MIT"
] | 4
|
2020-04-19T08:17:03.000Z
|
2022-02-17T05:00:03.000Z
|
amazon_asin_fetcher/amazon_asin_fetcher/spiders/asin_spider.py
|
turboLJY/amazon-reviews-scrapy
|
31278183a1530ea1a8f2e27f3d85dfbd4848354c
|
[
"MIT"
] | null | null | null |
amazon_asin_fetcher/amazon_asin_fetcher/spiders/asin_spider.py
|
turboLJY/amazon-reviews-scrapy
|
31278183a1530ea1a8f2e27f3d85dfbd4848354c
|
[
"MIT"
] | 1
|
2020-09-01T00:44:01.000Z
|
2020-09-01T00:44:01.000Z
|
# *-* coding: utf-8 *-*
"""
Created on: 5-Jul-2018
@author: Ai
"""
import scrapy
from googletrans import Translator
class ASINSpider(scrapy.Spider):
name = "asin"
def __init__(self, store=None, key=None, *args, **kwargs):
super(ASINSpider, self).__init__(*args, **kwargs)
self.store = store
self.key = key
if not store:
raise Exception('store is required')
self.start_urls = ['https://www.{0}/s/?keywords={1}'.format(self.store, self.key)]
def parse(self, response):
for item in response.css('li.s-result-item'):
if item.css('h2::attr(data-attribute)').extract_first() is not None:
gt = Translator()
yield {
'ASIN': item.css('li::attr(data-asin)').extract_first(),
'Product': gt.translate(item.css('h2::attr(data-attribute)').extract_first()).text
}
next_page = response.css('a[id="pagnNextLink"]::attr(href)').extract_first()
if next_page is not None:
yield response.follow(next_page, callback=self.parse)
| 32.285714
| 106
| 0.574336
|
150ff08a6742385ae301ebc0a725eae9f721682f
| 4,487
|
py
|
Python
|
server/app/outputs/dmx.py
|
BasementCat/audio-reactive-led-strip
|
acbfd3709ecf3f970c604045bb62da0b47661330
|
[
"MIT"
] | 1
|
2020-05-14T06:27:34.000Z
|
2020-05-14T06:27:34.000Z
|
server/app/outputs/dmx.py
|
BasementCat/audio-reactive-led-strip
|
acbfd3709ecf3f970c604045bb62da0b47661330
|
[
"MIT"
] | null | null | null |
server/app/outputs/dmx.py
|
BasementCat/audio-reactive-led-strip
|
acbfd3709ecf3f970c604045bb62da0b47661330
|
[
"MIT"
] | null | null | null |
import os
import glob
import logging
import threading
import time
import subprocess
import re
from dmxpy.DmxPy import DmxPy
from app import Task
from app.lib.misc import FPSCounter
logger = logging.getLogger(__name__)
def hexint(value):
    """Parse a base-16 string (e.g. '0403') into an int (PEP 8: def, not lambda)."""
    return int(value, 16)
def find_device_file__linux(vendor, product):
    """Scan /sys for a usb-serial device matching vendor/product IDs.

    Returns the matching /dev node path, or None when /sys is unavailable
    or no attached usb-serial device matches.
    """
    if not (os.path.exists('/sys') and os.path.isdir('/sys')):
        return None
    for sysdev in glob.glob('/sys/bus/usb-serial/devices/*'):
        node = os.path.basename(sysdev)
        with open(os.path.join(sysdev, '../uevent'), 'r') as uevent:
            for raw in uevent:
                entry = raw.strip()
                if not entry or '=' not in entry:
                    continue
                key, value = entry.split('=')
                if key != 'PRODUCT':
                    continue
                # PRODUCT is "vendor/product/..." in hex.
                dev_vendor, dev_product = map(hexint, value.split('/')[:2])
                if dev_vendor == vendor and dev_product == product:
                    return os.path.join('/dev', node)
def find_device_file__macos(vendor, product):
    """Locate the usbserial tty for vendor/product by parsing `ioreg` (macOS).

    Fixes: the original regexes were non-raw strings ('\\+', '\\s' are invalid
    escape sequences and a future SyntaxError), were recompiled on every line,
    and the int() fallback used a bare over-broad except.
    """
    # Start of a new IOUSB device entry, e.g. "+-o DeviceName  <...".
    device_re = re.compile(r'^\+-o (.+)\s+<')
    # A property line, e.g. '  | "idVendor" = 1027'.
    property_re = re.compile(r'^[\|\s]*"([\w\d\s]+)"\s+=\s+(.+)$')
    devices = []
    curdevice = {}
    res = subprocess.check_output(['ioreg', '-p', 'IOUSB', '-l', '-b']).decode('utf-8')
    for line in res.split('\n'):
        line = line.strip()
        if not line:
            continue
        if device_re.match(line):
            # A new device header flushes the one being accumulated.
            if curdevice:
                devices.append(curdevice)
                curdevice = {}
            continue
        match = property_re.match(line)
        if match:
            k, v = match.groups()
            if v.startswith('"'):
                v = v[1:-1]
            else:
                try:
                    v = int(v)
                except ValueError:
                    # Non-integer, non-string values (dicts, arrays) stay raw.
                    pass
            curdevice[k] = v
    if curdevice:
        devices.append(curdevice)
    for d in devices:
        if d.get('idVendor') == vendor and d.get('idProduct') == product:
            return '/dev/tty.usbserial-' + d['USB Serial Number']
def find_device_file(name):
    """Resolve *name* to a device file path.

    *name* is either a device path (e.g. ``/dev/ttyUSB0``) or any string
    without a ``:`` — both are returned as-is — or a USB ``vendor:product``
    ID (e.g. ``0403:6001``) that is searched for on the host.

    Raises:
        RuntimeError: when a vendor:product ID cannot be matched to a device.
    """
    # Name is either a path (/dev/ttyUSB0) which might change, or a device ID
    # (0403:6001) which does not. (The original re-checked ``':' not in name``
    # and raised ValueError afterwards; that branch was unreachable and has
    # been removed — behavior is unchanged.)
    if name.startswith('/') or ':' not in name:
        return name
    vendor, product = map(hexint, name.split(':'))
    for fn in (find_device_file__linux, find_device_file__macos):
        try:
            file = fn(vendor, product)
            if file:
                return file
        except Exception:
            # Probe failures (no /sys, no ioreg) are expected on the "wrong"
            # OS; log and try the next probe. Narrowed from a bare except so
            # KeyboardInterrupt/SystemExit are no longer swallowed.
            logger.debug("Failure in find device file", exc_info=True)
    raise RuntimeError(f"Can't find USB device {name}")
class DMX(Task):
    """Task that pushes channel values from the pipeline to a DMX interface.

    The serial device may appear/disappear at runtime, so opening it is
    retried lazily (at most once per second) instead of failing hard.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if not self.config.get('DMX_DEVICE'):
            raise ValueError("No DMX_DEVICE in config")
        self.dmx = None                      # DmxPy handle, opened lazily
        self.dmx_lock = threading.Lock()     # guards DmxPy construction
        self.dmx_attempt = None              # timestamp of last open attempt
        # Minimum interval between renders, derived from the configured FPS.
        self.delay = 1.0 / float(self.config.get('FPS', 60))
        self.last_send = 0
        self.fps = FPSCounter('DMX')
        self.get_dmx()
    def get_dmx(self):
        """Return the DmxPy handle, trying to (re)open it at most once/second.

        'sink' is a null device name: no handle is ever opened for it.
        """
        if not self.dmx and self.config.get('DMX_DEVICE') != 'sink':
            # Throttle open attempts to one per second.
            if self.dmx_attempt is None or time.time() - self.dmx_attempt > 1:
                self.dmx_attempt = time.time()
                if not self.config.get('DMX_DEVICE'):
                    # Log only the first time (None); False marks "already
                    # reported". Note this early return yields None, not
                    # self.dmx.
                    if self.config.get('DMX_DEVICE') is None:
                        logger.error("No DMX device configured")
                    self.config['DMX_DEVICE'] = False
                    return
                with self.dmx_lock:
                    try:
                        self.dmx = DmxPy(find_device_file(self.config['DMX_DEVICE']))
                    except:
                        # Device may be unplugged; keep self.dmx None and retry later.
                        logger.error("Can't open DMX device %s", self.config['DMX_DEVICE'], exc_info=True)
        return self.dmx
    def run(self, data):
        """Apply channel updates from *data* and render at the configured rate.

        'dmx_force' entries are rendered immediately; plain 'dmx' entries are
        batched and rendered no more often than self.delay seconds apart.
        """
        dmx = self.get_dmx()
        if dmx:
            if data.get('dmx_force'):
                with self.fps:
                    for chan, val in data['dmx_force'].items():
                        dmx.setChannel(chan, val)
                    dmx.render()
            if data.get('dmx'):
                for chan, val in data['dmx'].items():
                    dmx.setChannel(chan, val)
            # Rate-limit ordinary renders to the configured FPS.
            if time.time() - self.last_send >= self.delay:
                self.last_send = time.time()
                with self.fps:
                    dmx.render()
| 31.377622
| 106
| 0.516158
|
0547e42bf8128476ba489e42c087078e17dd282b
| 1,325
|
py
|
Python
|
object-generation-using-gans/data/unaligned_dataset.py
|
sevmardi/ml-projects
|
0eb218c77cda61285cfcf599599ff28a8a8deba7
|
[
"MIT"
] | null | null | null |
object-generation-using-gans/data/unaligned_dataset.py
|
sevmardi/ml-projects
|
0eb218c77cda61285cfcf599599ff28a8a8deba7
|
[
"MIT"
] | 7
|
2020-06-06T01:26:08.000Z
|
2022-02-10T11:26:58.000Z
|
object-generation-using-gans/data/unaligned_dataset.py
|
sevmardi/ml-projects
|
0eb218c77cda61285cfcf599599ff28a8a8deba7
|
[
"MIT"
] | null | null | null |
import os.path
import torchvision.transforms as transforms
from data.base_dataset import BaseDataset, get_transform
from data.image_folder import make_dataset
from PIL import Image
import PIL
from pdb import set_trace as st
class UnalignedDataset(BaseDataset):
    """Dataset of two unpaired image folders (CycleGAN-style trainA/trainB)."""

    def initialize(self, opt):
        """Collect and sort the image paths of both domains."""
        self.opt = opt
        self.root = opt.dataroot
        self.dir_A = os.path.join(opt.dataroot, opt.phase + 'A')
        self.dir_B = os.path.join(opt.dataroot, opt.phase + 'B')
        self.A_paths = sorted(make_dataset(self.dir_A))
        self.B_paths = sorted(make_dataset(self.dir_B))
        self.A_size = len(self.A_paths)
        self.B_size = len(self.B_paths)
        self.transform = get_transform(opt)

    def __getitem__(self, index):
        """Return one transformed RGB image from each domain plus both paths.

        The index wraps modulo each domain's size, so the two images are not
        paired in any way.
        """
        path_a = self.A_paths[index % self.A_size]
        path_b = self.B_paths[index % self.B_size]
        image_a = self.transform(Image.open(path_a).convert('RGB'))
        image_b = self.transform(Image.open(path_b).convert('RGB'))
        return {'A': image_a, 'B': image_b,
                'A_paths': path_a, 'B_paths': path_b}

    def __len__(self):
        """Length is the larger domain so every image is visited per epoch."""
        return max(self.A_size, self.B_size)

    def name(self):
        return 'UnalignedDataset'
| 29.444444
| 64
| 0.649811
|
c948f63b55fa433ae8b7ee45a10eedd007753508
| 7,048
|
py
|
Python
|
smith/modeling_test.py
|
egonrian/google-research
|
8177adbe9ca0d7e5a9463b54581fe6dd27be0974
|
[
"Apache-2.0"
] | 3
|
2021-01-18T04:46:49.000Z
|
2021-03-05T09:21:40.000Z
|
smith/modeling_test.py
|
Alfaxad/google-research
|
2c0043ecd507e75e2df9973a3015daf9253e1467
|
[
"Apache-2.0"
] | 7
|
2021-11-10T19:44:38.000Z
|
2022-02-10T06:48:39.000Z
|
smith/modeling_test.py
|
Alfaxad/google-research
|
2c0043ecd507e75e2df9973a3015daf9253e1467
|
[
"Apache-2.0"
] | 4
|
2021-02-08T10:25:45.000Z
|
2021-04-17T14:46:26.000Z
|
# coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import tempfile
from absl import flags
import numpy as np
import tensorflow.compat.v1 as tf
from smith import constants
from smith import experiment_config_pb2
from smith import modeling
FLAGS = flags.FLAGS
class ModelingTest(tf.test.TestCase):
  """Graph-construction tests for the SMITH dual-encoder in smith.modeling."""

  def setUp(self):
    """Builds a tiny BERT config, a dual-encoder config and toy features."""
    super(ModelingTest, self).setUp()
    # Deliberately tiny BERT so the test graph builds quickly.
    bert_config = {
        "attention_probs_dropout_prob": 0.1,
        "hidden_act": "gelu",
        "hidden_dropout_prob": 0.1,
        "hidden_size": 16,
        "initializer_range": 0.02,
        "intermediate_size": 32,
        "max_position_embeddings": 16,
        "num_attention_heads": 2,
        "num_hidden_layers": 2,
        "type_vocab_size": 2,
        "vocab_size": 9
    }
    with tempfile.NamedTemporaryFile(delete=False) as bert_config_writer:
      bert_config_writer.write(json.dumps(bert_config).encode("utf-8"))
    # Note that in practice the bert_config_file and doc_bert_config_file can
    # be different.
    bert_config_file = bert_config_writer.name
    doc_bert_config_file = bert_config_writer.name
    # Construct a dual_encoder_config for testing purpose.
    dual_encoder_config = experiment_config_pb2.DualEncoderConfig()
    encoder_config = dual_encoder_config.encoder_config
    encoder_config.model_name = constants.MODEL_NAME_SMITH_DUAL_ENCODER
    encoder_config.max_seq_length = 6
    encoder_config.max_sent_length_by_word = 2
    encoder_config.max_doc_length_by_sentence = 3
    encoder_config.loop_sent_number_per_doc = 3
    encoder_config.max_predictions_per_seq = 1
    encoder_config.use_masked_sentence_lm_loss = True
    encoder_config.max_masked_sent_per_doc = 2
    encoder_config.bert_config_file = bert_config_file
    encoder_config.doc_bert_config_file = doc_bert_config_file
    # Set train_batch_size and eval_batch_size for the batch_size_static used
    # in the build_smith_ca function.
    train_eval_config = dual_encoder_config.train_eval_config
    train_eval_config.train_batch_size = 1
    train_eval_config.eval_batch_size = 1
    self.dual_encoder_config = dual_encoder_config
    self.train_mode = constants.TRAIN_MODE_JOINT_TRAIN
    self.model_fn = modeling.model_fn_builder(
        dual_encoder_config=dual_encoder_config,
        train_mode=self.train_mode,
        learning_rate=1e-5,
        num_train_steps=100000,
        num_warmup_steps=500,
        use_tpu=False,
        use_one_hot_embeddings=False,
        debugging=True)
    # A batch of one pair of 6-token "documents" with masked-LM targets and
    # a duplicate/match label.
    self.features = {
        "input_ids_1": tf.constant([[0, 5, 5, 7, 1, 1]], dtype=tf.int32),
        "input_mask_1": tf.constant([[1, 1, 1, 1, 1, 1]], dtype=tf.int32),
        "masked_lm_positions_1": tf.constant([[3]], dtype=tf.int32),
        "masked_lm_ids_1": tf.constant([[5]], dtype=tf.int32),
        "masked_lm_weights_1": tf.constant([[1.0]], dtype=tf.float32),
        "input_ids_2": tf.constant([[0, 4, 4, 7, 1, 1]], dtype=tf.int32),
        "input_mask_2": tf.constant([[1, 1, 1, 1, 1, 1]], dtype=tf.int32),
        "masked_lm_positions_2": tf.constant([[3]], dtype=tf.int32),
        "masked_lm_ids_2": tf.constant([[4]], dtype=tf.int32),
        "masked_lm_weights_2": tf.constant([[1.0]], dtype=tf.float32),
        "documents_match_labels": tf.constant([[1.0]], dtype=tf.float32)
    }

  def test_build_smith_dual_encoder(self):
    """Runs the dual encoder once and checks output shapes and dtypes."""
    masked_lm_positions_1 = tf.constant([[0, 2, 5]], dtype=tf.int32)
    masked_lm_ids_1 = tf.constant([[0, 5, 1]], dtype=tf.int32)
    masked_lm_weights_1 = tf.constant([[1.0, 1.0, 1.0]], dtype=tf.float32)
    masked_lm_positions_2 = tf.constant([[0, 2, 5]], dtype=tf.int32)
    masked_lm_ids_2 = tf.constant([[0, 5, 1]], dtype=tf.int32)
    masked_lm_weights_2 = tf.constant([[1.0, 1.0, 1.0]], dtype=tf.float32)
    # Only a subset of the returned tensors is checked; the rest are ignored.
    (masked_lm_loss_1, _,
     masked_lm_example_loss_1, _,
     _, _,
     masked_sent_lm_loss_1, _,
     _, _,
     _, _, sequence_encoding_1,
     _, _,
     _, _,
     _, siamese_loss, siamese_example_loss,
     siamese_logits) = \
        modeling.build_smith_dual_encoder(
            dual_encoder_config=self.dual_encoder_config,
            train_mode=self.train_mode,
            is_training=True,
            input_ids_1=self.features["input_ids_1"],
            input_mask_1=self.features["input_mask_1"],
            masked_lm_positions_1=masked_lm_positions_1,
            masked_lm_ids_1=masked_lm_ids_1,
            masked_lm_weights_1=masked_lm_weights_1,
            input_ids_2=self.features["input_ids_2"],
            input_mask_2=self.features["input_mask_2"],
            masked_lm_positions_2=masked_lm_positions_2,
            masked_lm_ids_2=masked_lm_ids_2,
            masked_lm_weights_2=masked_lm_weights_2,
            use_one_hot_embeddings=False,
            documents_match_labels=self.features["documents_match_labels"])
    with tf.Session() as sess:
      sess.run([tf.global_variables_initializer()])
      result_numpy = sess.run([
          masked_lm_loss_1, masked_lm_example_loss_1, sequence_encoding_1,
          siamese_loss, siamese_example_loss, siamese_logits,
          masked_sent_lm_loss_1
      ])
    # Scalar losses, per-example losses and the (batch, hidden) encoding.
    self.assertEqual(result_numpy[0].shape, ())
    self.assertDTypeEqual(result_numpy[0], np.float32)
    self.assertEqual(result_numpy[1].shape, (1, 3))
    self.assertDTypeEqual(result_numpy[1], np.float32)
    self.assertEqual(result_numpy[2].shape, (1, 16))
    self.assertDTypeEqual(result_numpy[2], np.float32)
    self.assertEqual(result_numpy[3].shape, ())
    self.assertDTypeEqual(result_numpy[3], np.float32)
    self.assertEqual(result_numpy[4].shape, (1,))
    self.assertDTypeEqual(result_numpy[4], np.float32)
    self.assertEqual(result_numpy[5].shape, (1,))
    self.assertDTypeEqual(result_numpy[5], np.float32)
    self.assertEqual(result_numpy[6].shape, ())
    self.assertDTypeEqual(result_numpy[6], np.float32)

  def test_model_fn_builder_train(self):
    """Smoke test: the model_fn builds in TRAIN mode without raising."""
    self.model_fn(
        features=self.features,
        labels=None,
        mode=tf.estimator.ModeKeys.TRAIN,
        params=None)

  def test_model_fn_builder_eval(self):
    """Smoke test: the model_fn builds in EVAL mode without raising."""
    self.model_fn(
        features=self.features,
        labels=None,
        mode=tf.estimator.ModeKeys.EVAL,
        params=None)

  def test_model_fn_builder_predict(self):
    """Smoke test: the model_fn builds in PREDICT mode without raising."""
    self.model_fn(
        features=self.features,
        labels=None,
        mode=tf.estimator.ModeKeys.PREDICT,
        params=None)
# Standard TensorFlow test entry point: discovers and runs the cases above.
if __name__ == "__main__":
  tf.test.main()
| 38.097297
| 77
| 0.690267
|
f3641bf7fb85f833b58ccd80f2cda096701aedbe
| 10,032
|
py
|
Python
|
misp_epo_policy.py
|
mohlcyber/MISP-ENS-ExpertRules
|
00a3558121c248f564007b78c20aef85cbc10dbc
|
[
"Apache-2.0"
] | 1
|
2020-11-09T00:24:09.000Z
|
2020-11-09T00:24:09.000Z
|
misp_epo_policy.py
|
mohlcyber/MISP-ENS-ExpertRules
|
00a3558121c248f564007b78c20aef85cbc10dbc
|
[
"Apache-2.0"
] | null | null | null |
misp_epo_policy.py
|
mohlcyber/MISP-ENS-ExpertRules
|
00a3558121c248f564007b78c20aef85cbc10dbc
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# Written by mohlcyber v.0.2 12/08/2020
import os
import time
import sys
import requests
import json
import xml.etree.ElementTree as ET
import base64
import re
import logging
from pymisp import ExpandedPyMISP
# Most ePO deployments use a self-signed certificate; silence TLS warnings.
requests.packages.urllib3.disable_warnings()
# --- McAfee ePO connection settings ---
EPO_URL = 'https://1.1.1.1'
EPO_PORT = '8443'
EPO_USERNAME = 'admin'
EPO_PASSWORD = 'pass'
EPO_POLICY_NAME = 'Expert Rule Policy'
EPO_SIGNATURE_ID = '20000'
# --- MISP connection settings ---
MISP_URL = 'https://2.2.2.2/'
MISP_KEY = 'api key'
MISP_VERIFY = False
MISP_TAG = 'McAfee: Update ENS Expert Rules'
# Local cache of hashes already pushed into the Expert Rule.
HASH_FILE = 'exports/misp_hashes.txt'
# Upper bound on hashes kept in the rule (oldest dropped first).
MAXIMUM = 10
# Console-only logger for operator feedback.
loglevel = 'INFO'
logger = logging.getLogger('logs')
logger.setLevel(loglevel)
consoleHandler = logging.StreamHandler()
consoleHandler.setLevel(loglevel)
logger.addHandler(consoleHandler)
formatter = logging.Formatter("%(asctime)s;%(levelname)s;%(message)s")
consoleHandler.setFormatter(formatter)
class MISP():
    """Pulls MD5 attributes from MISP events tagged MISP_TAG and caches them.

    Fixes over the original: the HASH_FILE read handle was never closed,
    the merge/trim logic was duplicated across two near-identical branches,
    and an unused ``eventid`` variable was assigned per event.
    """

    def __init__(self):
        self.misp = ExpandedPyMISP(MISP_URL, MISP_KEY, MISP_VERIFY)
        self.misp_hashes = []

    def query(self):
        """Collect MD5 values from all tagged events, untag them, persist.

        Returns:
            True when at least one tagged event was processed, False when
            none matched. On error the exception is logged and None returned
            (matching the original behavior).
        """
        try:
            events = self.misp.search(tags=MISP_TAG)
            if not events:
                return False
            for event in events:
                # Top-level attributes...
                for attribute in event['Event']['Attribute']:
                    if attribute['type'] == 'md5':
                        self.misp_hashes.append(attribute['value'])
                # ...and attributes nested inside objects.
                for misp_object in event['Event']['Object']:
                    for attribute in misp_object['Attribute']:
                        if attribute['type'] == 'md5':
                            self.misp_hashes.append(attribute['value'])
                # Remove the tag so the same event is not processed again.
                self.misp.untag(event['Event']['uuid'], MISP_TAG)
            logger.info('STATUS: Found {0} Hash in MISP Events that use tag {1}.'
                        .format(str(len(self.misp_hashes)), MISP_TAG))
            self.write_to_file()
            return True
        except Exception as error:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            logger.info('ERROR: Error in {location}.{funct_name}() - line {line_no} : {error}'
                        .format(location=__name__, funct_name=sys._getframe().f_code.co_name,
                                line_no=exc_tb.tb_lineno, error=str(error)))

    def write_to_file(self):
        """Merge self.misp_hashes into HASH_FILE, keeping at most MAXIMUM.

        Existing entries are preserved (deduplicated) and the oldest are
        dropped first when the cap is exceeded.
        """
        known = []
        if os.path.exists(HASH_FILE):
            # 'with' guarantees the handle is closed (the original leaked it).
            with open(HASH_FILE, 'r') as fp:
                known = fp.read().split('\n')
        for value in self.misp_hashes:
            if value not in known:
                known.append(value)
        if len(known) > MAXIMUM:
            logger.info('ATTENTION: Maximum amount of hashes reached. Removing oldest.')
            known = known[len(known) - MAXIMum:] if False else known[len(known) - MAXIMUM:]
        # Rewrite in place; no need to os.remove() first.
        with open(HASH_FILE, 'w') as fp:
            fp.write('\n'.join(known))
class EPO():
    """Thin client for the McAfee ePO remote API plus Expert-Rule XML surgery."""
    def __init__(self):
        self.epo_url = EPO_URL
        self.epo_port = EPO_PORT
        self.epo_verify = False        # ePO is typically self-signed
        self.epo_user = EPO_USERNAME
        self.epo_pw = EPO_PASSWORD
        self.session = requests.Session()
        self.policy = EPO_POLICY_NAME
        # Expert-rule template with a HASH placeholder per -v line.
        # NOTE(review): this handle is never closed; consider a 'with' block.
        self.expert_tmp = open('expert_tmp.txt', 'r').read()
        self.expert_rule = ''
    def request(self, option, **kwargs):
        """Call the ePO remote endpoint *option*; returns (status_code, response).

        POST is used when a body (data/json/files) is supplied, GET otherwise.
        On error the exception is logged and None is returned.
        """
        try:
            kwargs.setdefault('auth', (self.epo_user, self.epo_pw))
            kwargs.setdefault('verify', self.epo_verify)
            kwargs.setdefault('params', {})
            # Ask ePO to answer in JSON rather than its default text format.
            kwargs['params'][':output'] = 'json'
            url = '{}:{}/remote/{}'.format(self.epo_url, self.epo_port, option)
            if kwargs.get('data') or kwargs.get('json') or kwargs.get('files'):
                res = self.session.post(url, **kwargs)
            else:
                res = self.session.get(url, **kwargs)
            return res.status_code, res
        except Exception as error:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            logger.info('ERROR: Error in {location}.{funct_name}() - line {line_no} : {error}'
                        .format(location=__name__, funct_name=sys._getframe().f_code.co_name, line_no=exc_tb.tb_lineno,
                                error=str(error)))
    def prep_xml(self):
        """Rebuild the exported policy XML with an updated Expert Rule payload.

        Reads exports/policy_old.xml and HASH_FILE, expands the expert-rule
        template once per cached hash, base64-encodes the result into the
        SignatureContent setting, and writes exports/policy_new.xml.
        """
        org_xml = open('exports/policy_old.xml', 'r')
        hashes = open(HASH_FILE, 'r').read()
        tmp_hashes = []
        for line in hashes.split('\n'):
            tmp_hashes.append(line)
        tree_org = ET.parse(org_xml)
        root_org = tree_org.getroot()
        # Build the new document by copying the relevant sections over.
        tree_mod = ET.ElementTree()
        root_mod = ET.Element('epo:EPOPolicySchema')
        root_mod.attrib = {
            'xmlns:epo': 'mcafee-epo-policy',
            'xmlns:xsi': 'http://www.w3.org/2001/XMLSchema-instance'
        }
        for shema in root_org.iter('epo:EPOPolicySchema'):
            root_mod.append(shema)
        for info in root_org.iter('EPOPolicyVerInfo'):
            root_mod.append(info)
        for set in root_org.iter('EPOPolicySettings'):
            if EPO_POLICY_NAME in set.attrib['name']:
                if set.attrib['categoryid'] == 'EAM_BufferOverflow_Policies':
                    # Only touch the settings block whose SignatureID matches.
                    for setting in set.iter('Setting'):
                        if setting.attrib['name'] == 'SignatureID' and setting.attrib['value'] == EPO_SIGNATURE_ID:
                            for setting in set.iter('Setting'):
                                if 'SignatureContent' in setting.attrib['name']:
                                    # org_payload = setting.attrib['value']
                                    # enc_org_payload = (base64.b64decode(org_payload)).decode()
                                    # Template lines containing `-v "` are
                                    # duplicated once per cached hash.
                                    for line in self.expert_tmp.split('\n'):
                                        md5_line = re.findall(r'.-v\s\x22', line)
                                        if len(md5_line) > 0:
                                            for hash in tmp_hashes:
                                                nline = re.sub(r'(HASH)', hash, line)
                                                self.expert_rule += nline + '\r\n'
                                        else:
                                            self.expert_rule += line + '\r\n'
                                    setting.attrib['value'] = base64.b64encode(self.expert_rule.encode()).decode()
        # NOTE(review): this sits *after* the loop, so only the last iterated
        # EPOPolicySettings element is appended — looks accidental if the
        # export can contain several; confirm against a real policy export.
        root_mod.append(set)
        for obj in root_org.iter('EPOPolicyObject'):
            if EPO_POLICY_NAME in obj.attrib['name']:
                root_mod.append(obj)
        tree_mod._setroot(root_mod)
        tree_mod.write('exports/policy_new.xml', encoding='utf-8', xml_declaration=True)
def main():
    """One sync cycle: pull hashes from MISP, rebuild and re-import the policy."""
    misp = MISP()
    logger.debug('STATUS: Starting to query MISP for Events with tag {0}.'.format(str(MISP_TAG)))
    if misp.query() is False:
        logger.debug('SUCCESS: No MISP Events found with tag {0}.'.format(MISP_TAG))
        return
    epo = EPO()
    # Locate the target policy by name.
    status, policy_res = epo.request('policy.find', data={'searchText': EPO_POLICY_NAME})
    if status != 200:
        logger.info('ERROR: Could not run ePO API request. Error: {} - {}'.format(str(status), policy_res))
        return
    # NOTE(review): str.strip('OK:') strips any of the characters 'O','K',':'
    # from both ends, not the literal prefix 'OK:'. It works for this API's
    # "OK: <json>" responses only because JSON never starts/ends with those
    # characters — confirm, or switch to removeprefix('OK:').
    policy_res_json = json.loads(policy_res.text.strip('OK:'))
    if len(policy_res_json) > 1:
        logger.info('ERROR: Found multiple policies with the same name. Please be more specific.')
        return
    elif len(policy_res_json) < 1:
        logger.info('STATUS: Policy does not exist. Please create policy manually and assign to the right systems.')
        return
    else:
        logger.debug('STATUS: Identified policy. Going to download, make changes and upload policy again.')
        productId = policy_res_json[0]['productId']
        status, policy_exp = epo.request('policy.export', params={'productId': productId})
        if status != 200:
            logger.info('ERROR: Could not export policy. Error: {} - {}'.format(str(status), policy_exp))
            return
        policy_exp_json = json.loads(policy_exp.text.strip('OK:'))
        # Persist the export so prep_xml() can rewrite it.
        with open('exports/policy_old.xml', 'w') as output:
            output.write(policy_exp_json)
            output.close()
        epo.prep_xml()
        logger.debug('STATUS: Successfully made changes to the policy. Trying to upload.')
        status, policy_import = epo.request('policy.importPolicy', params={'force': True},
                                            files={'file': ('policy_new.xml', open('exports/policy_new.xml', 'rb'),
                                                            'multipart/form-data')})
        if status != 200:
            logger.info('ERROR: Could not import new policy. Error: {} - {}'.format(str(status), policy_import))
            return
        else:
            logger.info('SUCCESS: Successful import new policy in ePO.')
if __name__ == '__main__':
    # Poll MISP and resync the ePO policy once a minute, forever.
    while True:
        main()
        time.sleep(60)
| 36.48
| 131
| 0.546551
|
cab401857149e5c12cf56d504ec19f1639d115ee
| 37,217
|
py
|
Python
|
qiskit/optimization/algorithms/admm_optimizer.py
|
Cristian-Malinescu/qiskit-aqua
|
b29596800447c3130a20ec72a18b7fd8ed9fdb2f
|
[
"Apache-2.0"
] | null | null | null |
qiskit/optimization/algorithms/admm_optimizer.py
|
Cristian-Malinescu/qiskit-aqua
|
b29596800447c3130a20ec72a18b7fd8ed9fdb2f
|
[
"Apache-2.0"
] | null | null | null |
qiskit/optimization/algorithms/admm_optimizer.py
|
Cristian-Malinescu/qiskit-aqua
|
b29596800447c3130a20ec72a18b7fd8ed9fdb2f
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""An implementation of the ADMM algorithm."""
import copy
import logging
import time
import warnings
from typing import List, Optional, Tuple
import numpy as np
from qiskit.aqua.algorithms import NumPyMinimumEigensolver
from .minimum_eigen_optimizer import MinimumEigenOptimizer
from .optimization_algorithm import OptimizationAlgorithm, OptimizationResult
from .slsqp_optimizer import SlsqpOptimizer
from ..problems.constraint import Constraint
from ..problems.linear_constraint import LinearConstraint
from ..problems.quadratic_objective import QuadraticObjective
from ..problems.quadratic_program import QuadraticProgram
from ..problems.variable import VarType, Variable
UPDATE_RHO_BY_TEN_PERCENT = 0
UPDATE_RHO_BY_RESIDUALS = 1
logger = logging.getLogger(__name__)
class ADMMParameters:
"""Defines a set of parameters for ADMM optimizer."""
def __init__(self,
rho_initial: float = 10000,
factor_c: float = 100000,
beta: float = 1000,
maxiter: int = 10,
tol: float = 1.e-4,
max_time: float = np.inf,
three_block: bool = True,
vary_rho: int = UPDATE_RHO_BY_TEN_PERCENT,
tau_incr: float = 2,
tau_decr: float = 2,
mu_res: float = 10,
mu_merit: float = 1000,
warm_start: bool = False,
max_iter: Optional[int] = None) -> None:
"""Defines parameters for ADMM optimizer and their default values.
Args:
rho_initial: Initial value of rho parameter of ADMM.
factor_c: Penalizing factor for equality constraints, when mapping to QUBO.
beta: Penalization for y decision variables.
maxiter: Maximum number of iterations for ADMM.
tol: Tolerance for the residual convergence.
max_time: Maximum running time (in seconds) for ADMM.
three_block: Boolean flag to select the 3-block ADMM implementation.
vary_rho: Flag to select the rule to update rho.
If set to 0, then rho increases by 10% at each iteration.
If set to 1, then rho is modified according to primal and dual residuals.
tau_incr: Parameter used in the rho update (UPDATE_RHO_BY_RESIDUALS).
The update rule can be found in:
Boyd, S., Parikh, N., Chu, E., Peleato, B., & Eckstein, J. (2011).
Distributed optimization and statistical learning via the alternating
direction method of multipliers.
Foundations and Trends® in Machine learning, 3(1), 1-122.
tau_decr: Parameter used in the rho update (UPDATE_RHO_BY_RESIDUALS).
mu_res: Parameter used in the rho update (UPDATE_RHO_BY_RESIDUALS).
mu_merit: Penalization for constraint residual. Used to compute the merit values.
warm_start: Start ADMM with pre-initialized values for binary and continuous variables
by solving a relaxed (all variables are continuous) problem first. This option does
not guarantee the solution will optimal or even feasible. The option should be
used when tuning other options does not help and should be considered as a hint
to the optimizer where to start its iterative process.
max_iter: Deprecated, use maxiter.
"""
super().__init__()
if max_iter is not None:
warnings.warn('The max_iter parameter is deprecated as of '
'0.8.0 and will be removed no sooner than 3 months after the release. '
'You should use maxiter instead.',
DeprecationWarning)
maxiter = max_iter
self.mu_merit = mu_merit
self.mu_res = mu_res
self.tau_decr = tau_decr
self.tau_incr = tau_incr
self.vary_rho = vary_rho
self.three_block = three_block
self.max_time = max_time
self.tol = tol
self.maxiter = maxiter
self.factor_c = factor_c
self.beta = beta
self.rho_initial = rho_initial
self.warm_start = warm_start
def __repr__(self) -> str:
props = ", ".join(["{}={}".format(key, value) for (key, value) in vars(self).items()])
return "{0}({1})".format(type(self).__name__, props)
class ADMMState:
"""Internal computation state of the ADMM implementation.
The state keeps track of various variables are stored that are being updated during problem
solving. The values are relevant to the problem being solved. The state is recreated for each
optimization problem. State is returned as the third value.
"""
def __init__(self,
op: QuadraticProgram,
rho_initial: float) -> None:
"""
Args:
op: The optimization problem being solved.
rho_initial: Initial value of the rho parameter.
"""
super().__init__()
# Optimization problem itself
self.op = op
# Indices of the variables
self.binary_indices = None # type: Optional[List[int]]
self.continuous_indices = None # type: Optional[List[int]]
self.step1_absolute_indices = None # type: Optional[List[int]]
self.step1_relative_indices = None # type: Optional[List[int]]
# define heavily used matrix, they are used at each iteration, so let's cache them,
# they are np.ndarrays
# pylint:disable=invalid-name
# objective
self.q0 = None # type: Optional[np.ndarray]
self.c0 = None # type: Optional[np.ndarray]
self.q1 = None # type: Optional[np.ndarray]
self.c1 = None # type: Optional[np.ndarray]
# constraints
self.a0 = None # type: Optional[np.ndarray]
self.b0 = None # type: Optional[np.ndarray]
# These are the parameters that are updated in the ADMM iterations.
self.u = np.zeros(op.get_num_continuous_vars())
binary_size = op.get_num_binary_vars()
self.x0 = np.zeros(binary_size)
self.z = np.zeros(binary_size)
self.z_init = self.z
self.y = np.zeros(binary_size)
self.lambda_mult = np.zeros(binary_size)
# The following structures store quantities obtained in each ADMM iteration.
self.cost_iterates = [] # type: List[float]
self.residuals = [] # type: List[float]
self.dual_residuals = [] # type: List[float]
self.cons_r = [] # type: List[float]
self.merits = [] # type: List[float]
self.lambdas = [] # type: List[float]
self.x0_saved = [] # type: List[np.ndarray]
self.u_saved = [] # type: List[np.ndarray]
self.z_saved = [] # type: List[np.ndarray]
self.y_saved = [] # type: List[np.ndarray]
self.rho = rho_initial
# lin. eq. constraints with bin. vars. only
self.binary_equality_constraints = [] # type: List[LinearConstraint]
# all equality constraints
self.equality_constraints = [] # type: List[Constraint]
# all inequality constraints
self.inequality_constraints = [] # type: List[Constraint]
class ADMMOptimizationResult(OptimizationResult):
    """Result of an ADMM optimization run, carrying the internal ADMM state."""

    def __init__(self, x: np.ndarray, fval: float, variables: List[Variable],
                 state: ADMMState) -> None:
        """
        Args:
            x: optimal variable values found by ADMM.
            fval: optimal objective value.
            variables: the variables of the optimization problem.
            state: internal computation state of ADMM, kept as raw results.
        """
        super().__init__(x=x, fval=fval, variables=variables, raw_results=state)

    @property
    def state(self) -> ADMMState:
        """Internal ADMM computation state recorded during the solve."""
        return self._raw_results
class ADMMOptimizer(OptimizationAlgorithm):
"""An implementation of the ADMM-based heuristic.
This algorithm is introduced in [1].
**References:**
[1] Gambella, C., & Simonetto, A. (2020). Multi-block ADMM Heuristics for Mixed-Binary
Optimization on Classical and Quantum Computers. arXiv preprint arXiv:2001.02069.
"""
def __init__(self, qubo_optimizer: Optional[OptimizationAlgorithm] = None,
             continuous_optimizer: Optional[OptimizationAlgorithm] = None,
             params: Optional[ADMMParameters] = None) -> None:
    """
    Args:
        qubo_optimizer: An instance of OptimizationAlgorithm that can effectively solve
            QUBO problems. Defaults to a :class:`MinimumEigenOptimizer` backed by
            :class:`NumPyMinimumEigensolver`.
        continuous_optimizer: An instance of OptimizationAlgorithm that can solve
            continuous problems. Defaults to :class:`SlsqpOptimizer`.
        params: An instance of ADMMParameters; default parameters are used when omitted.
    """
    super().__init__()
    self._log = logging.getLogger(__name__)
    # fall back to defaults for anything the caller did not supply
    self._params = params if params is not None else ADMMParameters()
    self._qubo_optimizer = qubo_optimizer or MinimumEigenOptimizer(NumPyMinimumEigensolver())
    self._continuous_optimizer = continuous_optimizer or SlsqpOptimizer()
    # intermediate computation state; created anew by each call to solve()
    self._state = None  # type: Optional[ADMMState]
def get_compatibility_msg(self, problem: QuadraticProgram) -> Optional[str]:
    """Checks whether a given problem can be solved with this optimizer.

    ADMM requires the objective to be separable in the binary/integer and
    continuous variables, i.e. no quadratic cross-term may couple the two
    groups.

    Args:
        problem: The optimization problem to check compatibility.

    Returns:
        An empty string if the problem is compatible, otherwise a message
        describing the incompatibility. (The original docstring claimed a
        boolean/raise contract that the code never had.)
    """
    # 1. get bin/int and continuous variable indices
    bin_int_indices = self._get_variable_indices(problem, Variable.Type.BINARY)
    continuous_indices = self._get_variable_indices(problem, Variable.Type.CONTINUOUS)
    # 2. binary and continuous variables must be separable in the objective.
    # A single non-zero cross coefficient already makes the problem
    # incompatible, so stop at the first one (the original appended one copy
    # of the message per mixed pair, producing a duplicated message).
    for bin_int_index in bin_int_indices:
        for continuous_index in continuous_indices:
            if problem.objective.quadratic[bin_int_index, continuous_index] != 0:
                return 'Binary and continuous variables are not separable in the objective. '
    # no incompatibility found
    return ''
def solve(self, problem: QuadraticProgram) -> ADMMOptimizationResult:
    """Tries to solve the given problem using the ADMM algorithm.

    Args:
        problem: The problem to be solved.

    Returns:
        The result of the optimizer applied to the problem.

    Raises:
        QiskitOptimizationError: If the problem is not compatible with the ADMM optimizer.
    """
    self._verify_compatibility(problem)
    self._log.debug("Initial problem: %s", problem.export_as_lp_string())
    # map integer variables to binary variables
    from ..converters.integer_to_binary import IntegerToBinary
    int2bin = IntegerToBinary()
    original_variables = problem.variables
    problem = int2bin.convert(problem)
    # we deal with minimization in the optimizer, so turn the problem to minimization
    problem, sense = self._turn_to_minimization(problem)
    # create our computation state.
    self._state = ADMMState(problem, self._params.rho_initial)
    # parse problem and convert to an ADMM specific representation.
    self._state.binary_indices = self._get_variable_indices(problem, Variable.Type.BINARY)
    self._state.continuous_indices = self._get_variable_indices(problem,
                                                                Variable.Type.CONTINUOUS)
    if self._params.warm_start:
        # warm start injection for the initial values of the variables
        self._warm_start(problem)
    # convert optimization problem to a set of matrices and vector that are used
    # at each iteration.
    self._convert_problem_representation()
    start_time = time.time()
    # we have not started our computations yet, so elapsed time initialized as zero.
    elapsed_time = 0.0
    iteration = 0
    residual = 1.e+2
    while (iteration < self._params.maxiter and residual > self._params.tol) \
            and (elapsed_time < self._params.max_time):
        if self._state.step1_absolute_indices:
            # step 1: QUBO over (a subset of) the binary variables
            op1 = self._create_step1_problem()
            self._state.x0 = self._update_x0(op1)
            self._log.debug("Step 1 sub-problem: %s", op1.export_as_lp_string())
        # else, no binary variables exist, and no update to be done in this case.
        self._log.debug("x0=%s", self._state.x0)
        # step 2: continuous problem over u and the relaxed binaries z
        op2 = self._create_step2_problem()
        self._state.u, self._state.z = self._update_x1(op2)
        self._log.debug("Step 2 sub-problem: %s", op2.export_as_lp_string())
        self._log.debug("u=%s", self._state.u)
        self._log.debug("z=%s", self._state.z)
        if self._params.three_block:
            # step 3: optional auxiliary y update of the 3-block variant
            if self._state.binary_indices:
                op3 = self._create_step3_problem()
                self._state.y = self._update_y(op3)
                self._log.debug("Step 3 sub-problem: %s", op3.export_as_lp_string())
            self._log.debug("y=%s", self._state.y)
        self._state.lambda_mult = self._update_lambda_mult()
        self._log.debug("lambda: %s", self._state.lambda_mult)
        cost_iterate = self._get_objective_value()
        constraint_residual = self._get_constraint_residual()
        residual, dual_residual = self._get_solution_residuals(iteration)
        merit = self._get_merit(cost_iterate, constraint_residual)
        self._log.debug("cost_iterate=%s, cr=%s, merit=%s",
                        cost_iterate, constraint_residual, merit)
        # costs are saved with their original sign.
        self._state.cost_iterates.append(cost_iterate)
        self._state.residuals.append(residual)
        self._state.dual_residuals.append(dual_residual)
        self._state.cons_r.append(constraint_residual)
        self._state.merits.append(merit)
        self._state.lambdas.append(np.linalg.norm(self._state.lambda_mult))
        self._state.x0_saved.append(self._state.x0)
        self._state.u_saved.append(self._state.u)
        self._state.z_saved.append(self._state.z)
        # Bug fix: the original appended y to z_saved, which left y_saved
        # permanently empty AND doubled the length of z_saved, corrupting the
        # z history indexed by _get_solution_residuals (z_saved[iteration-1]).
        self._state.y_saved.append(self._state.y)
        self._update_rho(residual, dual_residual)
        iteration += 1
        elapsed_time = time.time() - start_time
    binary_vars, continuous_vars, objective_value = self._get_best_merit_solution()
    solution = self._revert_solution_indexes(binary_vars, continuous_vars)
    # flip the objective sign again if the original problem was a maximization
    objective_value = objective_value * sense
    # convert back integer to binary
    base_result = OptimizationResult(solution, objective_value, original_variables)
    base_result = int2bin.interpret(base_result)
    # third parameter is our internal state of computations.
    result = ADMMOptimizationResult(x=base_result.x, fval=base_result.fval,
                                    variables=base_result.variables,
                                    state=self._state)
    self._log.debug("solution=%s, objective=%s at iteration=%s",
                    solution, objective_value, iteration)
    return result
@staticmethod
def _turn_to_minimization(problem: QuadraticProgram) -> Tuple[QuadraticProgram, float]:
    """Normalizes the problem to minimization.

    If the problem is a maximization, a deep copy with the objective sign
    flipped is returned; otherwise the original problem is returned as-is.

    Args:
        problem: a problem to turn to minimization.

    Returns:
        The (possibly copied) minimization problem together with the numeric
        value of the original sense, used later to restore the objective sign.
    """
    original_sense = problem.objective.sense.value
    if problem.objective.sense == QuadraticObjective.Sense.MAXIMIZE:
        # never mutate the caller's problem
        problem = copy.deepcopy(problem)
        problem.objective.sense = QuadraticObjective.Sense.MINIMIZE
        problem.objective.constant = (-1) * problem.objective.constant
        problem.objective.linear = (-1) * problem.objective.linear.coefficients
        problem.objective.quadratic = (-1) * problem.objective.quadratic.coefficients
    return problem, original_sense
@staticmethod
def _get_variable_indices(op: QuadraticProgram, var_type: VarType) -> List[int]:
    """Returns the positions of all variables of the given type.

    Args:
        op: Optimization problem.
        var_type: type of variables to look for.

    Returns:
        Indices of the matching variables, in their original order.
    """
    return [idx for idx, var in enumerate(op.variables) if var.vartype == var_type]
def _get_current_solution(self) -> np.ndarray:
    """Assembles the current iterate (x0 for the binaries, u for the
    continuous variables) into one solution vector in original order.

    Returns:
        An array with the current solution.
    """
    return self._revert_solution_indexes(self._state.x0, self._state.u)
def _revert_solution_indexes(self, binary_vars: np.ndarray, continuous_vars: np.ndarray) \
        -> np.ndarray:
    """Places the binary and continuous values back at their original positions.

    Args:
        binary_vars: solution for binary variables
        continuous_vars: solution for continuous variables

    Returns:
        A full-length solution array in the original variable order.
    """
    total_size = len(self._state.binary_indices) + len(self._state.continuous_indices)
    solution = np.zeros(total_size)
    # scatter each group of values back to its original index locations
    solution.put(self._state.binary_indices, binary_vars)
    solution.put(self._state.continuous_indices, continuous_vars)
    return solution
def _convert_problem_representation(self) -> None:
    """Converts the problem into the matrices/vectors cached in the state.

    Populates, in this order (later steps depend on earlier ones):
    constraint classification, step-1 variable indices, objective matrices
    q0/c0 (step-1 variables) and q1/c1 (continuous variables), and the
    binary equality system a0/b0.
    """
    binary_var_indices = set(self._state.binary_indices)
    # separate constraints
    for l_constraint in self._state.op.linear_constraints:
        if l_constraint.sense == Constraint.Sense.EQ:
            self._state.equality_constraints.append(l_constraint)
            # verify that there are only binary variables in the constraint
            # this is to build A0, b0 in step 1
            constraint_var_indices = set(l_constraint.linear.to_dict().keys())
            if constraint_var_indices.issubset(binary_var_indices):
                self._state.binary_equality_constraints.append(l_constraint)
        elif l_constraint.sense in (Constraint.Sense.LE, Constraint.Sense.GE):
            self._state.inequality_constraints.append(l_constraint)
    # separate quadratic constraints into eq and non-eq
    # (quadratic constraints never qualify as "binary equality" for step 1)
    for q_constraint in self._state.op.quadratic_constraints:
        if q_constraint.sense == Constraint.Sense.EQ:
            self._state.equality_constraints.append(q_constraint)
        elif q_constraint.sense in (Constraint.Sense.LE, Constraint.Sense.GE):
            self._state.inequality_constraints.append(q_constraint)
    # separately keep binary variables that are for step 1 only
    # (requires binary_equality_constraints to be populated above)
    # temp variables are due to limit of 100 chars per line
    step1_absolute_indices, step1_relative_indices = self._get_step1_indices()
    self._state.step1_absolute_indices = step1_absolute_indices
    self._state.step1_relative_indices = step1_relative_indices
    # objective: split into step-1 (binary) and continuous parts
    self._state.q0 = self._get_q(self._state.step1_absolute_indices)
    c0_vec = self._state.op.objective.linear.to_array()[self._state.step1_absolute_indices]
    self._state.c0 = c0_vec
    self._state.q1 = self._get_q(self._state.continuous_indices)
    self._state.c1 = self._state.op.objective.linear.to_array()[self._state.continuous_indices]
    # equality constraints with binary vars only, as A0 x0 = b0
    self._state.a0, self._state.b0 = self._get_a0_b0()
def _get_step1_indices(self) -> Tuple[List[int], List[int]]:
    """
    Constructs two arrays of absolute (pointing to the original problem) and
    relative (pointing to the list of all binary variables) indices of the
    variables to be included in the step1 (QUBO) problem.

    Returns: A tuple of lists with absolute and relative indices

    Raises:
        ValueError: if a step-1 variable is not among the binary variables
            (should be impossible by construction).
    """
    # here we keep binary indices from the original problem
    step1_absolute_indices = []
    # a binary variable joins step 1 if it appears in the objective,
    # either in the linear or the quadratic terms
    for binary_index in self._state.binary_indices:
        if self._state.op.objective.linear[binary_index] != 0 or np.abs(
                self._state.op.objective.quadratic.coefficients[binary_index, :]).sum() != 0:
            # add the variable if it was not added before
            if binary_index not in step1_absolute_indices:
                step1_absolute_indices.append(binary_index)
    # rest variables := all binary variables - already verified for step 1
    rest_binary = set(self._state.binary_indices).difference(step1_absolute_indices)
    # a binary variable also joins step 1 if it occurs in a binary equality
    # constraint.
    # Bug fix: the original tested `coefficient > 0`, which silently skipped
    # variables with a NEGATIVE coefficient; those coefficients were then
    # dropped when A0 is restricted to step1_absolute_indices in _get_a0_b0,
    # producing a wrong constraint system.
    for constraint in self._state.binary_equality_constraints:
        for binary_index in list(rest_binary):
            if constraint.linear[binary_index] != 0 \
                    and binary_index not in step1_absolute_indices:
                step1_absolute_indices.append(binary_index)
    # restore the ascending order possibly broken by the constraint scan
    step1_absolute_indices.sort()
    # relative indices: position of each step-1 variable within the list of
    # ALL binary variables; step 1 solves only for this subset.
    position_of = {abs_index: j for j, abs_index in enumerate(self._state.binary_indices)}
    step1_relative_indices = []
    for abs_index in step1_absolute_indices:
        if abs_index not in position_of:
            raise ValueError("No relative index found!")
        step1_relative_indices.append(position_of[abs_index])
    return step1_absolute_indices, step1_relative_indices
def _get_q(self, variable_indices: List[int]) -> np.ndarray:
    """Builds the quadratic-objective matrix restricted to the given variables.

    Only the upper triangle is filled, to avoid double counting the
    symmetric coefficients. The variables are re-indexed 0..size-1.

    Args:
        variable_indices: variable indices to look for.

    Returns:
        An array of shape (len(variable_indices), len(variable_indices)).
    """
    size = len(variable_indices)
    q = np.zeros(shape=(size, size))
    quadratic = self._state.op.objective.quadratic
    for row in range(size):
        for col in range(row, size):
            q[row, col] = quadratic[variable_indices[row], variable_indices[col]]
    return q
def _get_a0_b0(self) -> Tuple[np.ndarray, np.ndarray]:
    """Builds the pair (A0, b0) of the binary equality constraints
    A0 x0 = b0, restricted to the step-1 variables.

    Returns:
        Corresponding matrix and vector as numpy arrays.

    Raises:
        ValueError: if the problem is not suitable for this optimizer.
    """
    rows = []
    rhs_values = []
    for constraint in self._state.binary_equality_constraints:
        coefficients = constraint.linear.to_array()
        rows.append(coefficients.take(self._state.step1_absolute_indices).tolist())
        rhs_values.append(constraint.rhs)
    if rows:
        return np.array(rows), np.array(rhs_values)
    # no binary equality constraints: return a single all-zero row so the
    # downstream matrix algebra keeps consistent shapes
    np_matrix = np.array([0] * len(self._state.step1_absolute_indices)).reshape((1, -1))
    np_vector = np.zeros(shape=(1,))
    return np_matrix, np_vector
def _create_step1_problem(self) -> QuadraticProgram:
    """Creates the step 1 (QUBO) sub-problem over the step-1 binary variables.

    Builds the augmented-Lagrangian objective: the original binary objective
    (q0, c0) plus the penalty for the binary equality constraints
    (weighted by factor_c) plus the ADMM proximal term (weighted by rho).

    Returns:
        A newly created optimization problem.
    """
    op1 = QuadraticProgram()
    binary_size = len(self._state.step1_absolute_indices)
    # create the same binary variables.
    for i in range(binary_size):
        name = self._state.op.variables[self._state.step1_absolute_indices[i]].name
        op1.binary_var(name=name)
    # prepare and set quadratic objective:
    # Q0 + (factor_c/2) * A0^T A0 + (rho/2) * I
    quadratic_objective = self._state.q0 + \
        self._params.factor_c / 2 * np.dot(self._state.a0.transpose(), self._state.a0) +\
        self._state.rho / 2 * np.eye(binary_size)
    op1.objective.quadratic = quadratic_objective
    # prepare and set linear objective:
    # c0 - factor_c * b0^T A0 + rho * (-y - z) + lambda,
    # with y, z, lambda restricted to the step-1 (relative) positions
    linear_objective = self._state.c0 - \
        self._params.factor_c * np.dot(self._state.b0, self._state.a0) + \
        self._state.rho * (- self._state.y[self._state.step1_relative_indices] -
                           self._state.z[self._state.step1_relative_indices]) + \
        self._state.lambda_mult[self._state.step1_relative_indices]
    op1.objective.linear = linear_objective
    return op1
def _create_step2_problem(self) -> QuadraticProgram:
    """Creates the step 2 sub-problem: the original problem with binaries
    relaxed to [0, 1] continuous variables z, plus the ADMM proximal terms.

    Returns:
        A newly created optimization problem.
    """
    op2 = copy.deepcopy(self._state.op)
    # replace binary variables with the continuous ones bound in [0,1]
    # x0(bin) -> z(cts)
    # u (cts) are still there unchanged
    for i, var_index in enumerate(self._state.binary_indices):
        variable = op2.variables[var_index]
        variable.vartype = Variable.Type.CONTINUOUS
        variable.upperbound = 1.
        variable.lowerbound = 0.
        # replacing Q0 objective and take of min/max sense, initially we consider minimization
        # diagonal proximal term (rho/2) * z_i^2
        op2.objective.quadratic[var_index, var_index] = self._state.rho / 2
        # replacing linear objective: -lambda_i - rho * (x0_i - y_i)
        op2.objective.linear[var_index] = -1 * self._state.lambda_mult[i] - self._state.rho * \
            (self._state.x0[i] - self._state.y[i])
    # remove A0 x0 = b0 constraints — they are enforced in step 1 instead
    for constraint in self._state.binary_equality_constraints:
        op2.remove_linear_constraint(constraint.name)
    return op2
def _create_step3_problem(self) -> QuadraticProgram:
    """Creates the step 3 sub-problem (update of the auxiliary y variables
    of the 3-block ADMM variant).

    Returns:
        A newly created optimization problem.
    """
    op3 = QuadraticProgram()
    binary_size = len(self._state.binary_indices)
    # one unbounded continuous y variable per binary variable, reusing the
    # original variable names
    for idx in range(binary_size):
        source_name = self._state.op.variables[self._state.binary_indices[idx]].name
        op3.continuous_var(lowerbound=-np.inf, upperbound=np.inf, name=source_name)
    # quadratic part: (beta/2 + rho/2) * I
    identity = np.eye(binary_size)
    op3.objective.quadratic = self._params.beta / 2 * identity + \
        self._state.rho / 2 * identity
    # linear part driven by the current multiplier and the primal gap
    op3.objective.linear = - self._state.lambda_mult - \
        self._state.rho * (self._state.x0 - self._state.z)
    return op3
def _update_x0(self, op1: QuadraticProgram) -> np.ndarray:
    """Solves the Step1 QuadraticProgram via the qubo optimizer.

    Args:
        op1: the Step1 QuadraticProgram.

    Returns:
        Values for ALL binary variables; positions not solved in step 1
        remain zero.
    """
    full_solution = np.zeros(len(self._state.binary_indices))
    qubo_values = np.asarray(self._qubo_optimizer.solve(op1).x)
    # scatter the step-1 values into their relative positions
    full_solution[self._state.step1_relative_indices] = qubo_values
    return full_solution
def _update_x1(self, op2: QuadraticProgram) -> Tuple[np.ndarray, np.ndarray]:
    """Solves the Step2 QuadraticProgram via the continuous optimizer.

    Args:
        op2: the Step2 QuadraticProgram

    Returns:
        A pair of numpy arrays: the values of the continuous decision
        variables u, then the values of the relaxed binary variables z.
    """
    full_solution = np.asarray(self._continuous_optimizer.solve(op2).x)
    return (full_solution.take(self._state.continuous_indices),
            full_solution.take(self._state.binary_indices))
def _update_y(self, op3: QuadraticProgram) -> np.ndarray:
    """Solves the Step3 QuadraticProgram via the continuous optimizer.

    Args:
        op3: the Step3 QuadraticProgram

    Returns:
        A solution of the Step3, as a numpy array.
    """
    step3_result = self._continuous_optimizer.solve(op3)
    return np.asarray(step3_result.x)
def _get_best_merit_solution(self) -> Tuple[np.ndarray, np.ndarray, float]:
    """Picks the iterate with the smallest merit value as the ADMM solution.

    Returns:
        A tuple of (binary_vars, continuous_vars, sol_val), where
        * binary_vars: binary variable values at the best iterate
        * continuous_vars: continuous variable values at the best iterate
        * sol_val: value of the original objective at that iterate
    """
    # first iteration achieving the minimum merit (same tie-breaking as
    # list.index on the minimum value)
    best = min(range(len(self._state.merits)), key=self._state.merits.__getitem__)
    return (self._state.x0_saved[best],
            self._state.u_saved[best],
            self._state.cost_iterates[best])
def _update_lambda_mult(self) -> np.ndarray:
    """Updates the lambda multiplier from the freshly updated iterates
    x0, z, and y.

    Returns: The updated array of values of lambda multiplier.
    """
    primal_gap = self._state.x0 - self._state.z - self._state.y
    return self._state.lambda_mult + self._state.rho * primal_gap
def _update_rho(self, primal_residual: float, dual_residual: float) -> None:
    """Updates the rho penalty parameter according to the configured policy.

    Args:
        primal_residual: primal residual
        dual_residual: dual residual
    """
    policy = self._params.vary_rho
    if policy == UPDATE_RHO_BY_TEN_PERCENT:
        # steadily increase rho (capped) to aid convergence
        if self._state.rho < 1.e+10:
            self._state.rho *= 1.1
    elif policy == UPDATE_RHO_BY_RESIDUALS:
        # balance primal and dual residuals as in Boyd et al. (2011)
        if primal_residual > self._params.mu_res * dual_residual:
            self._state.rho *= self._params.tau_incr
        elif dual_residual > self._params.mu_res * primal_residual:
            self._state.rho *= self._params.tau_decr
def _get_constraint_residual(self) -> float:
    """Computes the violation of the original problem's constraints:

    * L1 norm of (body - rhs) over equality constraints
    * -min(body - rhs, 0) for >= constraints
    * max(body - rhs, 0) for <= constraints

    Returns:
        Total constraint violation as a float value.
    """
    solution = self._get_current_solution()
    violation = 0.0
    for constraint in self._state.equality_constraints:
        violation += np.abs(constraint.evaluate(solution) - constraint.rhs)
    for constraint in self._state.inequality_constraints:
        # flip the sign for >= constraints so any violation becomes positive
        direction = -1.0 if constraint.sense == Constraint.Sense.GE else 1.0
        violation += max(direction * (constraint.evaluate(solution) - constraint.rhs), 0.0)
    return violation
def _get_merit(self, cost_iterate: float, constraint_residual: float) -> float:
    """Computes the merit value of the current iterate: the cost plus a
    penalization of the constraint violation.

    Args:
        cost_iterate: Cost at the certain iteration.
        constraint_residual: Value of violation of the constraints.

    Returns:
        Merit value as a float
    """
    violation_penalty = self._params.mu_merit * constraint_residual
    return cost_iterate + violation_penalty
def _get_objective_value(self) -> float:
    """Evaluates the original objective at the current iterate.

    Returns:
        Value of the objective function as a float
    """
    current = self._get_current_solution()
    return self._state.op.objective.evaluate(current)
def _get_solution_residuals(self, iteration: int) -> Tuple[float, float]:
    """Computes the primal and dual residuals of the current iterate.

    Args:
        iteration: Iteration number.

    Returns:
        (r, s): primal and dual residuals.
    """
    primal_gap = self._state.x0 - self._state.z - self._state.y
    primal_residual = np.linalg.norm(primal_gap)
    # the dual residual compares z against its previous value
    # (or against the initial z on the very first iteration)
    if iteration > 0:
        previous_z = self._state.z_saved[iteration - 1]
    else:
        previous_z = self._state.z_init
    dual_residual = self._state.rho * np.linalg.norm(self._state.z - previous_z)
    return primal_residual, dual_residual
def _warm_start(self, problem: QuadraticProgram) -> None:
    """Solves a relaxed problem (all variables continuous) and initializes
    the optimizer state with the found solution.

    Args:
        problem: a problem to solve.

    Returns:
        None
    """
    qp_copy = copy.deepcopy(problem)
    for variable in qp_copy.variables:
        variable.vartype = VarType.CONTINUOUS
    cts_result = self._continuous_optimizer.solve(qp_copy)
    # consistency fix: every other method of this class logs through the
    # instance logger self._log; the original referenced a module-level
    # `logger` here.
    self._log.debug("Continuous relaxation: %s", cts_result.x)
    # seed the ADMM iterates from the relaxed solution
    self._state.x0 = cts_result.x[self._state.binary_indices]
    self._state.u = cts_result.x[self._state.continuous_indices]
    self._state.z = cts_result.x[self._state.binary_indices]
@property
def parameters(self) -> ADMMParameters:
    """The parameters currently used by the optimizer.

    Returns:
        The parameters.
    """
    return self._params

@parameters.setter
def parameters(self, params: ADMMParameters) -> None:
    """Replaces the optimizer parameters.

    Args:
        params: New parameters to set.
    """
    self._params = params
| 42.292045
| 100
| 0.637961
|
206c47ebc0fdc5a6a9e4a9d94663da5d43e0ab4f
| 2,237
|
py
|
Python
|
code/DNN/dnn_classification-keras.py
|
Knowledge-Precipitation-Tribe/Neural-network
|
eac2e66cdde85b34ddf9313ce4d2b123cc1b8be8
|
[
"MIT"
] | 3
|
2021-05-25T10:18:23.000Z
|
2022-02-09T08:55:14.000Z
|
code/DNN/dnn_classification-keras.py
|
Knowledge-Precipitation-Tribe/Neural-network
|
eac2e66cdde85b34ddf9313ce4d2b123cc1b8be8
|
[
"MIT"
] | null | null | null |
code/DNN/dnn_classification-keras.py
|
Knowledge-Precipitation-Tribe/Neural-network
|
eac2e66cdde85b34ddf9313ce4d2b123cc1b8be8
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-#
'''
# Name: dnn_classification-keras
# Description:
# Author: super
# Date: 2020/6/2
'''
from MiniFramework.DataReader_2_0 import *
from keras.models import Sequential
from keras.layers import Dense
import matplotlib.pyplot as plt
import os
os.environ['KMP_DUPLICATE_LIB_OK']='True'
def load_data():
    """Load the ch10 train/test data, normalize the features, shuffle, and
    carve out a validation set.

    Returns:
        (x_train, y_train, x_test, y_test, x_val, y_val)
    """
    reader = DataReader_2_0("../data/ch10.train.npz", "../data/ch10.test.npz")
    reader.ReadData()
    reader.NormalizeX()
    reader.Shuffle()
    reader.GenerateValidationSet()
    return (reader.XTrain, reader.YTrain,
            reader.XTest, reader.YTest,
            reader.XDev, reader.YDev)
def build_model():
    """Build and compile a tiny 2-3-1 sigmoid MLP for binary classification."""
    net = Sequential([
        Dense(3, activation='sigmoid', input_shape=(2,)),
        Dense(1, activation='sigmoid'),
    ])
    net.compile(optimizer='Adam',
                loss='binary_crossentropy',
                metrics=['accuracy'])
    return net
# Plot the training/validation accuracy and loss recorded during training.
def draw_train_history(history):
    """Draw two stacked panels: accuracy (top) and loss (bottom), each with
    the train and validation curves."""
    panels = (
        (211, 'accuracy', 'val_accuracy', 'model accuracy', 'accuracy'),
        (212, 'loss', 'val_loss', 'model loss', 'loss'),
    )
    plt.figure(1)
    for position, train_key, val_key, title, ylabel in panels:
        plt.subplot(position)
        plt.plot(history.history[train_key])
        plt.plot(history.history[val_key])
        plt.title(title)
        plt.ylabel(ylabel)
        plt.xlabel('epoch')
        plt.legend(['train', 'validation'], loc='upper left')
    plt.show()
if __name__ == '__main__':
    # load data, train, visualize the curves, then evaluate on the test set
    x_train, y_train, x_test, y_test, x_val, y_val = load_data()
    model = build_model()
    history = model.fit(x_train, y_train, epochs=200, batch_size=5,
                        validation_data=(x_val, y_val))
    draw_train_history(history)
    test_loss, test_accuracy = model.evaluate(x_test, y_test)
    print("test loss: {}, test accuracy: {}".format(test_loss, test_accuracy))
    weights = model.get_weights()
    print("weights: ", weights)
| 28.316456
| 99
| 0.675011
|
1a000d88a2cc54e47f4608e2a4b44d6fefafa5d2
| 12,426
|
py
|
Python
|
samples/train_cancer.py
|
robvcc/Mask_RCNN_shoe
|
67cb95bf931782a166ee8219c5dca41f660aa5a6
|
[
"MIT"
] | null | null | null |
samples/train_cancer.py
|
robvcc/Mask_RCNN_shoe
|
67cb95bf931782a166ee8219c5dca41f660aa5a6
|
[
"MIT"
] | null | null | null |
samples/train_cancer.py
|
robvcc/Mask_RCNN_shoe
|
67cb95bf931782a166ee8219c5dca41f660aa5a6
|
[
"MIT"
] | null | null | null |
import os
import numpy as np
import cv2
import matplotlib.pyplot as plt
from mrcnn.config import Config
#import utils
from mrcnn import model as modellib
from mrcnn import utils
from mrcnn import visualize
import yaml
from mrcnn.model import log
from PIL import Image
class ShapesConfig(Config):
    """Configuration for training on the toy shapes dataset.
    Derives from the base Config class and overrides values specific
    to the toy shapes dataset.
    """
    # Give the configuration a recognizable name
    NAME = "shape"
    # Train on 1 GPU and 8 images per GPU. We can put multiple images on each
    # GPU because the images are small. Batch size is 8 (GPUs * images/GPU).
    GPU_COUNT = 1
    IMAGES_PER_GPU = 8
    # Number of classes (including background)
    NUM_CLASSES = 1 + 8  # background + 8 shapes
    # Use small images for faster training. Set the limits of the small side
    # the large side, and that determines the image shape.
    IMAGE_MIN_DIM = 100
    IMAGE_MAX_DIM = 448
    # Use smaller anchors because our image and objects are small
    RPN_ANCHOR_SCALES = (8 * 6, 16 * 6, 32 * 6, 64 * 6, 128 * 6)  # anchor side in pixels
    # Reduce training ROIs per image because the images are small and have
    # few objects. Aim to allow ROI sampling to pick 33% positive ROIs.
    TRAIN_ROIS_PER_IMAGE = 100
    # Use a small epoch since the data is simple
    STEPS_PER_EPOCH = 100
    # use small validation steps since the epoch is small
    VALIDATION_STEPS = 50
class DrugDataset(utils.Dataset):
# Number of instances (objects) in the label image.
def get_obj_index(self, image):
    """Instance pixels are labeled 1..N, so the maximum pixel value is the
    instance count."""
    return np.max(image)
# Parse the labelme-generated yaml file to get the label of each mask layer.
def from_yaml_get_class(self, image_id):
    """Read the image's info.yaml and return its instance label names,
    with the leading background entry removed."""
    info = self.image_info[image_id]
    with open(info['yaml_path']) as f:
        # SafeLoader: loader-less yaml.load() is deprecated and can execute
        # arbitrary constructors on untrusted input; the labelme info.yaml
        # only contains plain lists/strings, which SafeLoader handles.
        temp = yaml.load(f.read(), Loader=yaml.SafeLoader)
    labels = temp['label_names']
    del labels[0]
    return labels
# Rewritten draw_mask.
def draw_mask(self, num_obj, mask, image, image_id):
    """Fill the per-instance binary mask channels from a label image.

    For each instance index k (0-based), sets mask[j, i, k] = 1 wherever the
    label image's pixel (i, j) equals k + 1.

    NOTE(review): this is an O(width * height * num_obj) per-pixel loop over
    info['width'] x info['height']; it implicitly assumes the PIL image is at
    least that large (getpixel raises IndexError otherwise) — confirm before
    vectorizing with numpy.
    """
    info = self.image_info[image_id]
    for index in range(num_obj):
        for i in range(info['width']):
            for j in range(info['height']):
                # PIL getpixel takes (x, y) = (column, row); the mask is
                # indexed [row, column, instance]
                at_pixel = image.getpixel((i, j))
                if at_pixel == index + 1:
                    mask[j, i, index] = 1
    return mask
# Rewritten load_shapes with our own classes; more can be added freely.
# Adds path, mask_path and yaml_path to each entry of self.image_info.
# Expected layout under dataset_root_path:
#   pic/<name>.png                      source images
#   <mask_floder>/<name>.png            instance label masks
#   labelme_json/<name>_json/info.yaml  labelme metadata
def load_shapes(self, count, img_floder, mask_floder, imglist, dataset_root_path):
    """Register `count` images from `imglist` with the dataset.

    Args:
        count: number of images to register.
        img_floder: directory containing the source images.
        mask_floder: directory containing the instance masks.
        imglist: list of image file names.
        dataset_root_path: root directory of the dataset layout above.
    """
    # Add classes; the dataset uses 8 shoe-related classes
    self.add_class("shapes", 1, "high-heeled")
    self.add_class("shapes", 2, "high-heeled-welt")
    self.add_class("shapes", 3, "sports")
    self.add_class("shapes", 4, "sports-welt")
    self.add_class("shapes", 5, "sandals")
    self.add_class("shapes", 6, "sandals-welt")
    self.add_class("shapes", 7, "slippers")
    self.add_class("shapes", 8, "slippers-welt")
    for i in range(count):
        # derive the base name and the paths of the mask / yaml companions
        filestr = imglist[i].split(".")[0]
        mask_path = mask_floder + "/" + filestr + ".png"
        yaml_path = dataset_root_path + "labelme_json/" + filestr + "_json/info.yaml"
        # read the image only to obtain its width and height
        cv_img = cv2.imread(dataset_root_path + "pic/" + filestr + ".png")
        self.add_image("shapes", image_id=i, path=img_floder + "/" + imglist[i],
                       width=cv_img.shape[1], height=cv_img.shape[0], mask_path=mask_path, yaml_path=yaml_path)
# 重写load_mask
def load_mask(self, image_id):
"""Generate instance masks for shapes of the given image ID.
"""
global iter_num
print("image_id",image_id)
info = self.image_info[image_id]
count = 1 # number of object
img = Image.open(info['mask_path'])
num_obj = self.get_obj_index(img)
mask = np.zeros([info['height'], info['width'], num_obj], dtype=np.uint8)
mask = self.draw_mask(num_obj, mask, img,image_id)
occlusion = np.logical_not(mask[:, :, -1]).astype(np.uint8)
for i in range(count - 2, -1, -1):
mask[:, :, i] = mask[:, :, i] * occlusion
occlusion = np.logical_and(occlusion, np.logical_not(mask[:, :, i]))
labels = []
labels = self.from_yaml_get_class(image_id)
labels_form = []
for i in range(len(labels)):
if labels[i].find("high-heeled-welt") != -1:
labels_form.append("high-heeled-welt")
elif labels[i].find("high-heeled") != -1:
labels_form.append("high-heeled")
elif labels[i].find("sports-welt") != -1:
labels_form.append("sports-welt")
elif labels[i].find("sports") != -1:
labels_form.append("sports")
elif labels[i].find("sandals-welt") != -1:
labels_form.append("sandals-welt")
elif labels[i].find("sandals") != -1:
labels_form.append("sandals")
elif labels[i].find("slippers-welt") != -1:
labels_form.append("slippers-welt")
elif labels[i].find("slippers") != -1:
labels_form.append("slippers")
class_ids = np.array([self.class_names.index(s) for s in labels_form])
return mask, class_ids.astype(np.int32)
class Detect_Config(ShapesConfig):
    """Inference configuration: effective batch size 1
    (GPU_COUNT * IMAGES_PER_GPU), overriding the training settings
    inherited from ShapesConfig."""
    GPU_COUNT = 1
    IMAGES_PER_GPU = 1
#shoes_train_class
class Shoes():
    """Driver tying together the shoe dataset, the Mask R-CNN model and
    the train / detect / evaluation entry points used by ``__main__``."""

    def __init__(self):
        # Project root is one directory above this script.
        self.ROOT_DIR = os.path.abspath("..")

        # Directory to save logs and trained model
        self.MODEL_DIR = os.path.join(self.ROOT_DIR, "logs")
        print(self.MODEL_DIR)
        self.iter_num = 0

        # Local path to COCO trained weights; download if missing.
        self.COCO_MODEL_PATH = os.path.join(self.ROOT_DIR, "mask_rcnn_coco.h5")
        if not os.path.exists(self.COCO_MODEL_PATH):
            utils.download_trained_weights(self.COCO_MODEL_PATH)

        self.config = ShapesConfig()
        # self.config.display()

        # Dataset layout: <root>/pic (images), <root>/cv2_mask (masks),
        # <root>/labelme_json (per-image info.yaml files).
        self.dataset_root_path = "/home/ljt/Shoe-data-V2/"
        self.img_floder = self.dataset_root_path + "pic"
        self.mask_floder = self.dataset_root_path + "cv2_mask"
        self.imglist = os.listdir(self.img_floder)
        self.count = len(self.imglist)

        self.detectconfig = Detect_Config()

    def prepare_data(self):
        """Build and prepare the training and validation datasets."""
        self.dataset_train = DrugDataset()
        self.dataset_train.load_shapes(
            self.count, self.img_floder, self.mask_floder, self.imglist,
            self.dataset_root_path)
        self.dataset_train.prepare()

        # NOTE(review): validation reuses the first 10 training images, so
        # validation metrics are optimistic — confirm this is intended.
        self.dataset_val = DrugDataset()
        self.dataset_val.load_shapes(
            10, self.img_floder, self.mask_floder, self.imglist,
            self.dataset_root_path)
        self.dataset_val.prepare()
        print("dataset_val-->", self.dataset_val._image_ids)

    def load_pretrain_model(self):
        """Create a training-mode model and load initial weights."""
        self.model = modellib.MaskRCNN(mode="training", config=self.config,
                                       model_dir=self.MODEL_DIR)
        init_with = "coco"  # imagenet, coco, or last
        if init_with == "imagenet":
            self.model.load_weights(self.model.get_imagenet_weights(),
                                    by_name=True)
        elif init_with == "coco":
            # Load weights trained on MS COCO, but skip layers that are
            # different due to the different number of classes.
            self.model.load_weights(self.COCO_MODEL_PATH, by_name=True,
                                    exclude=["mrcnn_class_logits",
                                             "mrcnn_bbox_fc",
                                             "mrcnn_bbox", "mrcnn_mask"])
        elif init_with == "last":
            # Load the last model you trained and continue training.
            self.model.load_weights(self.model.find_last()[1], by_name=True)

    def tarin(self):
        """Fine-tune all layers.  Misspelled name kept because existing
        callers use it; prefer the :meth:`train` alias."""
        self.model.train(self.dataset_train, self.dataset_val,
                         learning_rate=self.config.LEARNING_RATE / 10,
                         epochs=110,
                         layers='all')

    def train(self):
        """Correctly spelled alias for :meth:`tarin`."""
        return self.tarin()

    def detect(self):
        """Load saved weights in inference mode and run detection on a
        sample image, displaying the result."""
        self.model = modellib.MaskRCNN(mode="inference",
                                       config=self.detectconfig,
                                       model_dir=self.MODEL_DIR)
        self.SHARP_MODEL_PATH = ("/home/ljt/Mask_RCNN_shoes/logs/"
                                 "shape20190613T1601/mask_rcnn_shape_0010.h5")
        self.model.load_weights(self.SHARP_MODEL_PATH, by_name=True)
        print(self.SHARP_MODEL_PATH)

        image = cv2.imread("/home/ljt/Shoe-data-V2/test/20.png")
        results = self.model.detect([image], verbose=1)
        r = results[0]
        visualize.display_instances(image, r['rois'], r['masks'],
                                    r['class_ids'],
                                    self.dataset_val.class_names, r['scores'])

    def mAP(self):
        """Report mean average precision over 10 random validation images.

        Requires ``self.model`` to be an inference-mode model (call
        :meth:`detect` first).
        """
        image_ids = np.random.choice(self.dataset_val.image_ids, 10)
        APs = []
        for image_id in image_ids:
            # Load image and ground truth data.
            image, image_meta, gt_class_id, gt_bbox, gt_mask = \
                modellib.load_image_gt(self.dataset_val, self.detectconfig,
                                       image_id, use_mini_mask=False)
            # Run object detection.
            results = self.model.detect([image], verbose=0)
            r = results[0]
            # Compute AP against the ground truth.
            AP, precisions, recalls, overlaps = \
                utils.compute_ap(gt_bbox, gt_class_id, gt_mask,
                                 r["rois"], r["class_ids"], r["scores"],
                                 r['masks'])
            APs.append(AP)
        print("mAP: ", np.mean(APs))

    def acc(self):
        """Report the mean detection confidence over 20 random validation
        images (requires an inference-mode ``self.model``)."""
        image_ids = np.random.choice(self.dataset_val.image_ids, 20)
        scores = []
        for image_id in image_ids:
            image, image_meta, gt_class_id, gt_bbox, gt_mask = \
                modellib.load_image_gt(self.dataset_val, self.detectconfig,
                                       image_id, use_mini_mask=False)
            results = self.model.detect([image], verbose=0)
            r = results[0]
            # BUG FIX: ``not r["scores"]`` raises "truth value of an array
            # is ambiguous" for multi-detection results; test length.
            if len(r["scores"]) == 0:
                # Fall back to a nominal score when nothing was detected.
                r["scores"] = [0.5662385]
            print(r["scores"])
            # Flatten so np.mean works even when images have different
            # numbers of detections (ragged lists are not mean-able).
            scores.extend(float(s) for s in r["scores"])
        print("acc: ", np.mean(scores))
if __name__ == "__main__":
    # Build the driver and index the training/validation images.
    shoes = Shoes()
    shoes.prepare_data()
    # Uncomment to (re)train from COCO weights instead of running inference:
    #shoes.load_pretrain_model()
    #shoes.tarin()
    # Run detection with the saved weights, then report the mean score.
    shoes.detect()
    shoes.acc()
| 40.875
| 123
| 0.601642
|
8203d2474e6c729b07d76277f99a3bc5cf2d0d6e
| 47,210
|
py
|
Python
|
flopy/plot/crosssection.py
|
emorway-usgs/flopy
|
1fa24026d890abc4508a39eddf9049399c1e4d3f
|
[
"CC0-1.0",
"BSD-3-Clause"
] | 351
|
2015-01-03T15:18:48.000Z
|
2022-03-31T09:46:43.000Z
|
flopy/plot/crosssection.py
|
emorway-usgs/flopy
|
1fa24026d890abc4508a39eddf9049399c1e4d3f
|
[
"CC0-1.0",
"BSD-3-Clause"
] | 1,256
|
2015-01-15T21:10:42.000Z
|
2022-03-31T22:43:06.000Z
|
flopy/plot/crosssection.py
|
emorway-usgs/flopy
|
1fa24026d890abc4508a39eddf9049399c1e4d3f
|
[
"CC0-1.0",
"BSD-3-Clause"
] | 553
|
2015-01-31T22:46:48.000Z
|
2022-03-31T17:43:35.000Z
|
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors
from matplotlib.patches import Polygon
from . import plotutil
from ..utils import geometry
import copy
import warnings
warnings.simplefilter("always", PendingDeprecationWarning)
class PlotCrossSection:
"""
Class to create a cross sectional plot of a model.
Parameters
----------
ax : matplotlib.pyplot axis
The plot axis. If not provided it, plt.gca() will be used.
model : flopy.modflow object
flopy model object. (Default is None)
modelgrid : flopy.discretization.Grid object
can be a StructuredGrid, VertexGrid, or UnstructuredGrid object
line : dict
Dictionary with either "row", "column", or "line" key. If key
is "row" or "column" key value should be the zero-based row or
column index for cross-section. If key is "line" value should
be an array of (x, y) tuples with vertices of cross-section.
Vertices should be in map coordinates consistent with xul,
yul, and rotation.
extent : tuple of floats
(xmin, xmax, ymin, ymax) will be used to specify axes limits. If None
then these will be calculated based on grid, coordinates, and rotation.
geographic_coords : bool
boolean flag to allow the user to plot cross section lines in
geographic coordinates. If False (default), cross section is plotted
as the distance along the cross section line.
"""
    def __init__(
        self,
        model=None,
        modelgrid=None,
        ax=None,
        line=None,
        extent=None,
        geographic_coords=False,
    ):
        """Build the cross-section geometry for *line* on the model grid."""
        self.ax = ax
        self.geographic_coords = geographic_coords
        self.model = model

        # Resolve the grid from either argument; a grid is mandatory.
        if modelgrid is not None:
            self.mg = modelgrid
        elif model is not None:
            self.mg = model.modelgrid
        else:
            raise Exception("Cannot find model grid")

        if self.mg.top is None or self.mg.botm is None:
            raise AssertionError("modelgrid top and botm must be defined")

        # Exactly one of "row", "column", or "line" must be supplied.
        if not isinstance(line, dict):
            raise AssertionError("A line dictionary must be provided")

        line = {k.lower(): v for k, v in line.items()}

        if len(line) != 1:
            s = (
                "only row, column, or line can be specified in line "
                "dictionary keys specified: "
            )
            for k in line.keys():
                s += f"{k} "
            raise AssertionError(s)

        if ax is None:
            self.ax = plt.gca()
        else:
            self.ax = ax

        onkey = list(line.keys())[0]
        self.__geographic_xpts = None

        # un-translate model grid into model coordinates
        xcellcenters, ycellcenters = geometry.transform(
            self.mg.xcellcenters,
            self.mg.ycellcenters,
            self.mg.xoffset,
            self.mg.yoffset,
            self.mg.angrot_radians,
            inverse=True,
        )

        # Pad irregular cell polygons so all vertex arrays share one shape.
        xverts, yverts = self.mg.cross_section_vertices
        (
            xverts,
            yverts,
        ) = plotutil.UnstructuredPlotUtilities.irregular_shape_patch(
            xverts, yverts
        )

        self.xvertices, self.yvertices = geometry.transform(
            xverts,
            yverts,
            self.mg.xoffset,
            self.mg.yoffset,
            self.mg.angrot_radians,
            inverse=True,
        )

        if onkey in ("row", "column"):
            # eps nudges the endpoints just past the grid edge so the
            # section line intersects the outermost cells.
            eps = 1.0e-4
            xedge, yedge = self.mg.xyedges
            if onkey == "row":
                # a "row" section runs along the x axis
                self.direction = "x"
                ycenter = ycellcenters.T[0]
                pts = [
                    (xedge[0] - eps, ycenter[int(line[onkey])]),
                    (xedge[-1] + eps, ycenter[int(line[onkey])]),
                ]
            else:
                # a "column" section runs along the y axis
                self.direction = "y"
                xcenter = xcellcenters[0, :]
                pts = [
                    (xcenter[int(line[onkey])], yedge[0] + eps),
                    (xcenter[int(line[onkey])], yedge[-1] - eps),
                ]
        else:
            # arbitrary polyline: choose the projection axis from the
            # dominant coordinate span
            verts = line[onkey]
            xp = []
            yp = []
            for [v1, v2] in verts:
                xp.append(v1)
                yp.append(v2)

            xp, yp = self.mg.get_local_coords(xp, yp)

            if np.max(xp) - np.min(xp) > np.max(yp) - np.min(yp):
                # this is x-projection and we should buffer x by small amount
                idx0 = list(xp).index(np.max(xp))
                idx1 = list(xp).index(np.min(xp))
                xp[idx0] += 1e-04
                xp[idx1] -= 1e-04
                self.direction = "x"
            else:
                # this is y-projection and we should buffer y by small amount
                idx0 = list(yp).index(np.max(yp))
                idx1 = list(yp).index(np.min(yp))
                yp[idx0] += 1e-04
                yp[idx1] -= 1e-04
                self.direction = "y"

            pts = [(xt, yt) for xt, yt in zip(xp, yp)]

        self.pts = np.array(pts)

        # node number -> intersection points of the line with that cell
        self.xypts = plotutil.UnstructuredPlotUtilities.line_intersect_grid(
            self.pts, self.xvertices, self.yvertices
        )

        if len(self.xypts) < 2:
            s = "cross-section cannot be created\n."
            s += " less than 2 points intersect the model grid\n"
            s += f" {len(self.xypts)} points intersect the grid."
            raise Exception(s)

        if self.geographic_coords:
            # transform back to geographic coordinates
            xypts = {}
            for nn, pt in self.xypts.items():
                xp = [t[0] for t in pt]
                yp = [t[1] for t in pt]
                xp, yp = geometry.transform(
                    xp,
                    yp,
                    self.mg.xoffset,
                    self.mg.yoffset,
                    self.mg.angrot_radians,
                )
                xypts[nn] = [(xt, yt) for xt, yt in zip(xp, yp)]

            self.xypts = xypts

        # account for quasi-3d confining beds (laycbd) when present
        laycbd = []
        self.ncb = 0

        if self.model is not None:
            if self.model.laycbd is not None:
                laycbd = list(self.model.laycbd)
                self.ncb = np.count_nonzero(laycbd)

        if laycbd:
            # active flag per plotted layer: 0 marks a confining bed
            self.active = []
            for k in range(self.mg.nlay):
                self.active.append(1)
                if laycbd[k] > 0:
                    self.active.append(0)
            self.active = np.array(self.active, dtype=int)
        else:
            self.active = np.ones(self.mg.nlay, dtype=int)

        self._nlay, self._ncpl, self.ncb = self.mg.cross_section_lay_ncpl_ncb(
            self.ncb
        )

        top = self.mg.top.reshape(1, self._ncpl)
        botm = self.mg.botm.reshape(self._nlay + self.ncb, self._ncpl)

        # elevation of every layer interface: top followed by all botms
        self.elev = np.concatenate((top, botm), axis=0)

        self.idomain = self.mg.idomain
        if self.mg.idomain is None:
            self.idomain = np.ones(botm.shape, dtype=int)

        # projected cell polygons keyed by node number
        self.projpts = self.set_zpts(None)

        # Create cross-section extent
        if extent is None:
            self.extent = self.get_extent()
        else:
            self.extent = extent

        # this is actually x or y based on projection
        self.xcenters = [
            np.mean(np.array(v).T[0]) for i, v in sorted(self.projpts.items())
        ]

        # mean cell sizes, used later to reject arbitrary sections
        self.mean_dx = np.mean(
            np.max(self.xvertices, axis=1) - np.min(self.xvertices, axis=1)
        )
        self.mean_dy = np.mean(
            np.max(self.yvertices, axis=1) - np.min(self.yvertices, axis=1)
        )

        # lazy cache for matplotlib Polygon patches (see .polygons)
        self._polygons = {}

        # Set axis limits
        self.ax.set_xlim(self.extent[0], self.extent[1])
        self.ax.set_ylim(self.extent[2], self.extent[3])
    @property
    def polygons(self):
        """
        Method to return cached matplotlib polygons for a cross
        section

        Returns
        -------
        dict : [matplotlib.patches.Polygon]
        """
        if not self._polygons:
            for cell, poly in self.projpts.items():
                if len(poly) > 4:
                    # this is the rare multipolygon instance...
                    # split the vertex list into consecutive groups of
                    # four (one quadrilateral per group)
                    n = 0
                    p = []
                    polys = []
                    for vn, v in enumerate(poly):
                        if vn == 3 + 4 * n:
                            # fourth vertex of the current quad: close it
                            n += 1
                            p.append(v)
                            polys.append(p)
                            p = []
                        else:
                            p.append(v)
                else:
                    polys = [poly]

                for polygon in polys:
                    # order vertices by angle about the centroid before
                    # building the patch
                    verts = plotutil.UnstructuredPlotUtilities.arctan2(
                        np.array(polygon)
                    )

                    if cell not in self._polygons:
                        self._polygons[cell] = [Polygon(verts, closed=True)]
                    else:
                        self._polygons[cell].append(
                            Polygon(verts, closed=True)
                        )
        # shallow copy so callers cannot clobber the cache dict itself
        return copy.copy(self._polygons)
def get_extent(self):
"""
Get the extent of the rotated and offset grid
Returns
-------
tuple : (xmin, xmax, ymin, ymax)
"""
xpts = []
for _, verts in self.projpts.items():
for v in verts:
xpts.append(v[0])
xmin = np.min(xpts)
xmax = np.max(xpts)
ymin = np.min(self.elev)
ymax = np.max(self.elev)
return xmin, xmax, ymin, ymax
def plot_array(self, a, masked_values=None, head=None, **kwargs):
"""
Plot a three-dimensional array as a patch collection.
Parameters
----------
a : numpy.ndarray
Three-dimensional array to plot.
masked_values : iterable of floats, ints
Values to mask.
head : numpy.ndarray
Three-dimensional array to set top of patches to the minimum
of the top of a layer or the head value. Used to create
patches that conform to water-level elevations.
**kwargs : dictionary
keyword arguments passed to matplotlib.collections.PatchCollection
Returns
-------
patches : matplotlib.collections.PatchCollection
"""
ax = kwargs.pop("ax", self.ax)
if not isinstance(a, np.ndarray):
a = np.array(a)
if a.ndim > 1:
a = np.ravel(a)
if masked_values is not None:
for mval in masked_values:
a = np.ma.masked_values(a, mval)
if isinstance(head, np.ndarray):
projpts = self.set_zpts(np.ravel(head))
else:
projpts = None
pc = self.get_grid_patch_collection(a, projpts, **kwargs)
if pc is not None:
ax.add_collection(pc)
ax.set_xlim(self.extent[0], self.extent[1])
ax.set_ylim(self.extent[2], self.extent[3])
return pc
def plot_surface(self, a, masked_values=None, **kwargs):
"""
Plot a two- or three-dimensional array as line(s).
Parameters
----------
a : numpy.ndarray
Two- or three-dimensional array to plot.
masked_values : iterable of floats, ints
Values to mask.
**kwargs : dictionary
keyword arguments passed to matplotlib.pyplot.plot
Returns
-------
plot : list containing matplotlib.plot objects
"""
ax = kwargs.pop("ax", self.ax)
color = kwargs.pop("color", "b")
color = kwargs.pop("c", color)
if not isinstance(a, np.ndarray):
a = np.array(a)
if a.ndim > 1:
a = np.ravel(a)
if a.size % self._ncpl != 0:
raise AssertionError("Array size must be a multiple of ncpl")
if masked_values is not None:
for mval in masked_values:
a = np.ma.masked_values(a, mval)
d = {
i: (np.min(np.array(v).T[0]), np.max(np.array(v).T[0]))
for i, v in sorted(self.projpts.items())
}
surface = []
for cell, val in d.items():
if cell >= a.size:
continue
elif np.isnan(a[cell]):
continue
elif a[cell] is np.ma.masked:
continue
else:
line = ax.plot(
d[cell], [a[cell], a[cell]], color=color, **kwargs
)
surface.append(line)
ax.set_xlim(self.extent[0], self.extent[1])
ax.set_ylim(self.extent[2], self.extent[3])
return surface
def plot_fill_between(
self,
a,
colors=("blue", "red"),
masked_values=None,
head=None,
**kwargs,
):
"""
Plot a three-dimensional array as lines.
Parameters
----------
a : numpy.ndarray
Three-dimensional array to plot.
colors : list
matplotlib fill colors, two required
masked_values : iterable of floats, ints
Values to mask.
head : numpy.ndarray
Three-dimensional array to set top of patches to the minimum
of the top of a layer or the head value. Used to create
patches that conform to water-level elevations.
**kwargs : dictionary
keyword arguments passed to matplotlib.pyplot.plot
Returns
-------
plot : list containing matplotlib.fillbetween objects
"""
ax = kwargs.pop("ax", self.ax)
kwargs["colors"] = colors
if not isinstance(a, np.ndarray):
a = np.array(a)
a = np.ravel(a)
if masked_values is not None:
for mval in masked_values:
a = np.ma.masked_values(a, mval)
if isinstance(head, np.ndarray):
projpts = self.set_zpts(head)
else:
projpts = self.projpts
pc = self.get_grid_patch_collection(
a, projpts, fill_between=True, **kwargs
)
if pc is not None:
ax.add_collection(pc)
ax.set_xlim(self.extent[0], self.extent[1])
ax.set_ylim(self.extent[2], self.extent[3])
return pc
    def contour_array(self, a, masked_values=None, head=None, **kwargs):
        """
        Contour a two-dimensional array.

        Parameters
        ----------
        a : numpy.ndarray
            Three-dimensional array to plot.
        masked_values : iterable of floats, ints
            Values to mask.
        head : numpy.ndarray
            Three-dimensional array to set top of patches to the minimum
            of the top of a layer or the head value. Used to create
            patches that conform to water-level elevations.
        **kwargs : dictionary
            keyword arguments passed to matplotlib.pyplot.contour

        Returns
        -------
        contour_set : matplotlib.pyplot.contour
        """
        import matplotlib.tri as tri

        if not isinstance(a, np.ndarray):
            a = np.array(a)

        if a.ndim > 1:
            a = np.ravel(a)

        ax = kwargs.pop("ax", self.ax)
        xcenters = self.xcenters
        plotarray = np.array([a[cell] for cell in sorted(self.projpts)])

        # grid type decides between structured contour() and tricontour()
        (
            plotarray,
            xcenters,
            zcenters,
            mplcontour,
        ) = self.mg.cross_section_set_contour_arrays(
            plotarray, xcenters, head, self.elev, self.projpts
        )

        if not mplcontour:
            if isinstance(head, np.ndarray):
                zcenters = self.set_zcentergrid(np.ravel(head))
            else:
                zcenters = np.array(
                    [
                        np.mean(np.array(v).T[1])
                        for i, v in sorted(self.projpts.items())
                    ]
                )

        # work around for tri-contour ignore vmin & vmax
        # necessary for the tri-contour NaN issue fix
        if "levels" not in kwargs:
            vmin = kwargs.pop("vmin", np.nanmin(plotarray))
            vmax = kwargs.pop("vmax", np.nanmax(plotarray))
            levels = np.linspace(vmin, vmax, 7)
            kwargs["levels"] = levels

        # workaround for tri-contour nan issue: replace nan with a
        # sentinel value and make sure that sentinel is masked below
        plotarray[np.isnan(plotarray)] = -(2 ** 31)
        if masked_values is None:
            masked_values = [-(2 ** 31)]
        else:
            masked_values = list(masked_values)
            if -(2 ** 31) not in masked_values:
                masked_values.append(-(2 ** 31))

        # boolean mask accumulated across all masked values
        ismasked = None
        if masked_values is not None:
            for mval in masked_values:
                if ismasked is None:
                    ismasked = np.isclose(plotarray, mval)
                else:
                    t = np.isclose(plotarray, mval)
                    ismasked += t

        plot_triplot = kwargs.pop("plot_triplot", False)

        # optionally clip to a user-supplied (xmin, xmax, zmin, zmax)
        if "extent" in kwargs:
            extent = kwargs.pop("extent")

            idx = (
                (xcenters >= extent[0])
                & (xcenters <= extent[1])
                & (zcenters >= extent[2])
                & (zcenters <= extent[3])
            )
            plotarray = plotarray[idx].flatten()
            xcenters = xcenters[idx].flatten()
            zcenters = zcenters[idx].flatten()

        if mplcontour:
            plotarray = np.ma.masked_array(plotarray, ismasked)
            contour_set = ax.contour(xcenters, zcenters, plotarray, **kwargs)
        else:
            triang = tri.Triangulation(xcenters, zcenters)

            if ismasked is not None:
                ismasked = ismasked.flatten()
                # mask any triangle touching a masked vertex
                mask = np.any(
                    np.where(ismasked[triang.triangles], True, False), axis=1
                )
                triang.set_mask(mask)

            contour_set = ax.tricontour(triang, plotarray, **kwargs)

            if plot_triplot:
                ax.triplot(triang, color="black", marker="o", lw=0.75)

        ax.set_xlim(self.extent[0], self.extent[1])
        ax.set_ylim(self.extent[2], self.extent[3])

        return contour_set
def plot_inactive(self, ibound=None, color_noflow="black", **kwargs):
"""
Make a plot of inactive cells. If not specified, then pull ibound
from the self.ml
Parameters
----------
ibound : numpy.ndarray
ibound array to plot. (Default is ibound in 'BAS6' package.)
color_noflow : string
(Default is 'black')
Returns
-------
quadmesh : matplotlib.collections.QuadMesh
"""
if ibound is None:
if self.mg.idomain is None:
raise AssertionError("An idomain array must be provided")
else:
ibound = self.mg.idomain
plotarray = np.zeros(ibound.shape, dtype=int)
idx1 = ibound == 0
plotarray[idx1] = 1
plotarray = np.ma.masked_equal(plotarray, 0)
cmap = matplotlib.colors.ListedColormap(["0", color_noflow])
bounds = [0, 1, 2]
norm = matplotlib.colors.BoundaryNorm(bounds, cmap.N)
patches = self.plot_array(plotarray, cmap=cmap, norm=norm, **kwargs)
return patches
def plot_ibound(
self,
ibound=None,
color_noflow="black",
color_ch="blue",
color_vpt="red",
head=None,
**kwargs,
):
"""
Make a plot of ibound. If not specified, then pull ibound from the
self.model
Parameters
----------
ibound : numpy.ndarray
ibound array to plot. (Default is ibound in 'BAS6' package.)
color_noflow : string
(Default is 'black')
color_ch : string
Color for constant heads (Default is 'blue'.)
head : numpy.ndarray
Three-dimensional array to set top of patches to the minimum
of the top of a layer or the head value. Used to create
patches that conform to water-level elevations.
**kwargs : dictionary
keyword arguments passed to matplotlib.collections.PatchCollection
Returns
-------
patches : matplotlib.collections.PatchCollection
"""
if ibound is None:
if self.model is not None:
if self.model.version == "mf6":
color_ch = color_vpt
if self.mg.idomain is None:
raise AssertionError("Ibound/Idomain array must be provided")
ibound = self.mg.idomain
plotarray = np.zeros(ibound.shape, dtype=int)
idx1 = ibound == 0
idx2 = ibound < 0
plotarray[idx1] = 1
plotarray[idx2] = 2
plotarray = np.ma.masked_equal(plotarray, 0)
cmap = matplotlib.colors.ListedColormap(
["none", color_noflow, color_ch]
)
bounds = [0, 1, 2, 3]
norm = matplotlib.colors.BoundaryNorm(bounds, cmap.N)
# mask active cells
patches = self.plot_array(
plotarray,
masked_values=[0],
head=head,
cmap=cmap,
norm=norm,
**kwargs,
)
return patches
def plot_grid(self, **kwargs):
"""
Plot the grid lines.
Parameters
----------
kwargs : ax, colors. The remaining kwargs are passed into the
the LineCollection constructor.
Returns
-------
lc : matplotlib.collections.LineCollection
"""
ax = kwargs.pop("ax", self.ax)
col = self.get_grid_line_collection(**kwargs)
if col is not None:
ax.add_collection(col)
# ax.set_xlim(self.extent[0], self.extent[1])
# ax.set_ylim(self.extent[2], self.extent[3])
return col
    def plot_bc(
        self, name=None, package=None, kper=0, color=None, head=None, **kwargs
    ):
        """
        Plot boundary conditions locations for a specific boundary
        type from a flopy model

        Parameters
        ----------
        name : string
            Package name string ('WEL', 'GHB', etc.). (Default is None)
        package : flopy.modflow.Modflow package class instance
            flopy package class instance. (Default is None)
        kper : int
            Stress period to plot
        color : string
            matplotlib color string. (Default is None)
        head : numpy.ndarray
            Three-dimensional array (structured grid) or
            Two-dimensional array (vertex grid)
            to set top of patches to the minimum of the top of a
            layer or the head value. Used to create
            patches that conform to water-level elevations.
        **kwargs : dictionary
            keyword arguments passed to matplotlib.collections.PatchCollection

        Returns
        -------
        patches : matplotlib.collections.PatchCollection
        """
        # legacy keyword: "ftype" was the old name for "name"
        if "ftype" in kwargs and name is None:
            name = kwargs.pop("ftype")

        # Find package to plot
        if package is not None:
            p = package
        elif self.model is not None:
            if name is None:
                raise Exception("ftype not specified")
            name = name.upper()
            p = self.model.get_package(name)
        else:
            raise Exception("Cannot find package to plot")

        # trap for mf6 'cellid' vs mf2005 'k', 'i', 'j' convention
        if isinstance(p, list) or p.parent.version == "mf6":
            if not isinstance(p, list):
                p = [p]

            # accumulate cellids from every package instance as columns
            idx = np.array([])
            for pp in p:
                if pp.package_type in ("lak", "sfr", "maw", "uzf"):
                    t = plotutil.advanced_package_bc_helper(pp, self.mg, kper)
                else:
                    try:
                        mflist = pp.stress_period_data.array[kper]
                    except Exception as e:
                        raise Exception(
                            f"Not a list-style boundary package: {e!s}"
                        )
                    if mflist is None:
                        return

                    t = np.array(
                        [list(i) for i in mflist["cellid"]], dtype=int
                    ).T

                if len(idx) == 0:
                    idx = np.copy(t)
                else:
                    idx = np.append(idx, t, axis=1)

        else:
            # modflow-2005 structured and unstructured grid
            if p.package_type in ("uzf", "lak"):
                idx = plotutil.advanced_package_bc_helper(p, self.mg, kper)
            else:
                try:
                    mflist = p.stress_period_data[kper]
                except Exception as e:
                    raise Exception(
                        f"Not a list-style boundary package: {e!s}"
                    )
                if mflist is None:
                    return

                if len(self.mg.shape) == 3:
                    idx = [mflist["k"], mflist["i"], mflist["j"]]
                else:
                    idx = mflist["node"]

        # flag boundary cells with 1; everything else stays 0 and is masked
        if len(self.mg.shape) != 3:
            plotarray = np.zeros((self._nlay, self._ncpl), dtype=int)
            plotarray[tuple(idx)] = 1
        else:
            plotarray = np.zeros(
                (self.mg.nlay, self.mg.nrow, self.mg.ncol), dtype=int
            )
            plotarray[idx[0], idx[1], idx[2]] = 1

        plotarray = np.ma.masked_equal(plotarray, 0)

        # default color comes from the package-type color table
        if color is None:
            key = name[:3].upper()
            if key in plotutil.bc_color_dict:
                c = plotutil.bc_color_dict[key]
            else:
                c = plotutil.bc_color_dict["default"]
        else:
            c = color

        cmap = matplotlib.colors.ListedColormap(["none", c])
        bounds = [0, 1, 2]
        norm = matplotlib.colors.BoundaryNorm(bounds, cmap.N)

        patches = self.plot_array(
            plotarray,
            masked_values=[0],
            head=head,
            cmap=cmap,
            norm=norm,
            **kwargs,
        )

        return patches
    def plot_vector(
        self,
        vx,
        vy,
        vz,
        head=None,
        kstep=1,
        hstep=1,
        normalize=False,
        masked_values=None,
        **kwargs,
    ):
        """
        Plot a vector.

        Parameters
        ----------
        vx : np.ndarray
            x component of the vector to be plotted (non-rotated)
            array shape must be (nlay, nrow, ncol) for a structured grid
            array shape must be (nlay, ncpl) for a unstructured grid
        vy : np.ndarray
            y component of the vector to be plotted (non-rotated)
            array shape must be (nlay, nrow, ncol) for a structured grid
            array shape must be (nlay, ncpl) for a unstructured grid
        vz : np.ndarray
            z component of the vector to be plotted (non-rotated)
            array shape must be (nlay, nrow, ncol) for a structured grid
            array shape must be (nlay, ncpl) for a unstructured grid
        head : numpy.ndarray
            MODFLOW's head array.  If not provided, then the quivers will be
            plotted in the cell center.
        kstep : int
            layer frequency to plot (default is 1)
        hstep : int
            horizontal frequency to plot (default is 1)
        normalize : bool
            boolean flag used to determine if vectors should be normalized
            using the vector magnitude in each cell (default is False)
        masked_values : iterable of floats
            values to mask
        kwargs : matplotlib.pyplot keyword arguments for the
            plt.quiver method

        Returns
        -------
        quiver : matplotlib.pyplot.quiver
            result of the quiver function
        """
        ax = kwargs.pop("ax", self.ax)
        pivot = kwargs.pop("pivot", "middle")

        # Check that the cross section is not arbitrary with a tolerance
        # of the mean cell size in each direction
        arbitrary = False
        pts = self.pts
        xuniform = [
            True if abs(pts.T[0, 0] - i) < self.mean_dy else False
            for i in pts.T[0]
        ]
        yuniform = [
            True if abs(pts.T[1, 0] - i) < self.mean_dx else False
            for i in pts.T[1]
        ]
        if not np.all(xuniform) and not np.all(yuniform):
            arbitrary = True
        if arbitrary:
            err_msg = (
                "plot_specific_discharge() does not "
                "support arbitrary cross-sections"
            )
            raise AssertionError(err_msg)

        # get ibound array to mask inactive cells
        ib = np.ones((self.mg.nnodes,), dtype=int)
        if self.mg.idomain is not None:
            ib = self.mg.idomain.ravel()

        # get the actual values to plot and set xcenters
        if self.direction == "x":
            u_tmp = vx
        else:
            # y decreases along the plotting direction for column sections,
            # so flip the sign of the y component
            u_tmp = vy * -1.0

        # kstep implementation for vertex grid
        projpts = {
            key: value
            for key, value in self.projpts.items()
            if (key // self._ncpl) % kstep == 0
        }

        # set x and z centers
        if isinstance(head, np.ndarray):
            # pipe kstep to set_zcentergrid to assure consistent array size
            zcenters = self.set_zcentergrid(np.ravel(head), kstep=kstep)
        else:
            zcenters = [
                np.mean(np.array(v).T[1]) for i, v in sorted(projpts.items())
            ]

        xcenters = np.array(
            [np.mean(np.array(v).T[0]) for i, v in sorted(projpts.items())]
        )

        # pull the per-cell vector components in sorted node order
        x = np.ravel(xcenters)
        z = np.ravel(zcenters)
        u = np.array([u_tmp.ravel()[cell] for cell in sorted(projpts)])
        v = np.array([vz.ravel()[cell] for cell in sorted(projpts)])
        ib = np.array([ib[cell] for cell in sorted(projpts)])

        # horizontal thinning by hstep
        x = x[::hstep]
        z = z[::hstep]
        u = u[::hstep]
        v = v[::hstep]
        ib = ib[::hstep]

        # mask values
        if masked_values is not None:
            for mval in masked_values:
                to_mask = np.logical_or(u == mval, v == mval)
                u[to_mask] = np.nan
                v[to_mask] = np.nan

        # normalize
        if normalize:
            vmag = np.sqrt(u ** 2.0 + v ** 2.0)
            idx = vmag > 0.0
            u[idx] /= vmag[idx]
            v[idx] /= vmag[idx]

        # mask with an ibound array
        u[ib == 0] = np.nan
        v[ib == 0] = np.nan

        # plot with quiver
        quiver = ax.quiver(x, z, u, v, pivot=pivot, **kwargs)

        return quiver
    def plot_pathline(
        self, pl, travel_time=None, method="cell", head=None, **kwargs
    ):
        """
        Plot the MODPATH pathlines

        Parameters
        ----------
        pl : list of rec arrays or a single rec array
            rec array or list of rec arrays is data returned from
            modpathfile PathlineFile get_data() or get_alldata()
            methods. Data in rec array is 'x', 'y', 'z', 'time',
            'k', and 'particleid'.
        travel_time : float or str
            travel_time is a travel time selection for the displayed
            pathlines. If a float is passed then pathlines with times
            less than or equal to the passed time are plotted. If a
            string is passed a variety logical constraints can be added
            in front of a time value to select pathlines for a select
            period of time. Valid logical constraints are <=, <, >=, and
            >. For example, to select all pathlines less than 10000 days
            travel_time='< 10000' would be passed to plot_pathline.
            (default is None)
        method : str
            "cell" shows only pathlines that intersect with a cell
            "all" projects all pathlines onto the cross section regardless
            of whether they intersect with a given cell
        head : np.ndarray
            optional adjustment to only show pathlines that are <= to
            the top of the water table given a user supplied head array
        kwargs : layer, ax, colors.  The remaining kwargs are passed
            into the LineCollection constructor.

        Returns
        -------
        lc : matplotlib.collections.LineCollection
        """
        from matplotlib.collections import LineCollection
        from ..utils.geometry import point_in_polygon

        # make sure pathlines is a list
        if not isinstance(pl, list):
            pl = [pl]

        # marker styling options are stripped from kwargs before they are
        # handed to the LineCollection constructor
        marker = kwargs.pop("marker", None)
        markersize = kwargs.pop("markersize", None)
        markersize = kwargs.pop("ms", markersize)
        markercolor = kwargs.pop("markercolor", None)
        markerevery = kwargs.pop("markerevery", 1)
        ax = kwargs.pop("ax", self.ax)

        if "colors" not in kwargs:
            kwargs["colors"] = "0.5"

        # project onto water-table-corrected polygons when a head is given
        projpts = self.projpts
        if head is not None:
            projpts = self.set_zpts(head)

        # filter each pathline set by travel time ...
        pl2 = []
        for p in pl:
            tp = plotutil.filter_modpath_by_travel_time(p, travel_time)
            pl2.append(tp)

        # ... keep only the parts that intersect the section ...
        tp = plotutil.intersect_modpath_with_crosssection(
            pl2,
            projpts,
            self.xvertices,
            self.yvertices,
            self.direction,
            self._ncpl,
            method=method,
        )

        # ... and reproject into (distance-along-section, elevation)
        plines = plotutil.reproject_modpath_to_crosssection(
            tp,
            projpts,
            self.xypts,
            self.direction,
            self.mg,
            self._ncpl,
            self.geographic_coords,
        )

        # build linecollection and markers arrays
        linecol = []
        markers = []
        for _, arr in plines.items():
            arr = np.array(arr)
            # sort by distance so each line draws left-to-right
            arr = arr[arr[:, 0].argsort()]
            linecol.append(arr)
            if marker is not None:
                for xy in arr[::markerevery]:
                    markers.append(xy)

        lc = None
        if len(linecol) > 0:
            lc = LineCollection(linecol, **kwargs)
            ax.add_collection(lc)
            if marker is not None:
                markers = np.array(markers)
                ax.plot(
                    markers[:, 0],
                    markers[:, 1],
                    lw=0,
                    marker=marker,
                    color=markercolor,
                    ms=markersize,
                )

        return lc
def plot_timeseries(
    self, ts, travel_time=None, method="cell", head=None, **kwargs
):
    """
    Plot MODPATH timeseries records on the cross section.

    Thin wrapper around ``plot_pathline``: timeseries records share the
    pathline record layout, so the projection and drawing code is reused.

    Parameters
    ----------
    ts : list of rec arrays or a single rec array
        rec array or list of rec arrays is data returned from
        modpathfile TimeseriesFile get_data() or get_alldata()
        methods. Data in rec array is 'x', 'y', 'z', 'time',
        'k', and 'particleid'.
    travel_time : float or str
        travel_time is a travel time selection for the displayed
        pathlines. If a float is passed then pathlines with times
        less than or equal to the passed time are plotted. If a
        string is passed a variety logical constraints can be added
        in front of a time value to select pathlines for a select
        period of time. Valid logical constraints are <=, <, >=, and
        >. For example, to select all pathlines less than 10000 days
        travel_time='< 10000' would be passed to plot_pathline.
        (default is None)
    method : str
        "cell" shows only points that intersect with a cell,
        "all" projects all points onto the cross section
    head : np.ndarray
        optional adjustment to only show points that are <= to
        the top of the water table given a user supplied head array
    kwargs : layer, ax, colors. The remaining kwargs are passed
        into the LineCollection constructor. If layer='all',
        pathlines are output for all layers

    Returns
    -------
    lo : list of Line2D objects
    """
    # Forward "color" as the marker color so markers match the lines.
    if "color" in kwargs:
        kwargs["markercolor"] = kwargs["color"]
    return self.plot_pathline(
        ts, travel_time=travel_time, method=method, head=head, **kwargs
    )
def plot_endpoint(
    self,
    ep,
    direction="ending",
    selection=None,
    selection_direction=None,
    method="cell",
    head=None,
    **kwargs,
):
    """
    Plot MODPATH endpoint records on the cross section.

    Parameters
    ----------
    ep : rec array
        endpoint data returned from a modpathfile EndpointFile; each
        record carries at least 'time', 'time0' and 'particleid'.
    direction : str
        "ending" or "starting"; which end of the particle track to use.
    selection : tuple or None
        (k, i, j) or node selection passed through to
        plotutil.parse_modpath_selection_options.
    selection_direction : str or None
        selection direction passed through to the selection parser.
    method : str
        "cell" shows only endpoints that intersect with a cell,
        "all" projects all endpoints onto the cross section.
    head : np.ndarray
        optional head array used to recompute projected vertices so only
        endpoints at or below the water table are shown.
    kwargs : ax, colorbar, colorbar_label, shrink, s/size, c, and any
        remaining matplotlib scatter keyword arguments.

    Returns
    -------
    sp : matplotlib.collections.PathCollection (or None when nothing
        intersects the cross section)
    """
    ax = kwargs.pop("ax", self.ax)

    # colorbar kwargs
    createcb = kwargs.pop("colorbar", False)
    colorbar_label = kwargs.pop("colorbar_label", "Endpoint Time")
    shrink = float(kwargs.pop("shrink", 1.0))

    # marker kwargs ("s"/"size" is a radius; scatter wants area)
    s = kwargs.pop("s", np.sqrt(50))
    s = float(kwargs.pop("size", s)) ** 2.0

    # cd maps particle id -> color value (travel time or user color)
    cd = {}
    if "c" not in kwargs:
        vmin, vmax = 1e10, -1e10
        for rec in ep:
            tt = float(rec["time"] - rec["time0"])
            if tt < vmin:
                vmin = tt
            if tt > vmax:
                vmax = tt
            cd[int(rec["particleid"])] = tt
        kwargs["vmin"] = vmin
        kwargs["vmax"] = vmax
    else:
        tc = kwargs.pop("c")
        for rec in ep:
            # BUGFIX: was rec["praticleid"], which raised KeyError for
            # every record (the field is spelled "particleid" above).
            cd[int(rec["particleid"])] = tc

    tep, istart = plotutil.parse_modpath_selection_options(
        ep, direction, selection, selection_direction
    )[0:2]

    projpts = self.projpts
    if head is not None:
        projpts = self.set_zpts(head)

    tep = plotutil.intersect_modpath_with_crosssection(
        tep,
        projpts,
        self.xvertices,
        self.yvertices,
        self.direction,
        method=method,
        starting=istart,
    )
    if not tep:
        return

    epdict = plotutil.reproject_modpath_to_crosssection(
        tep,
        projpts,
        self.xypts,
        self.direction,
        self.mg,
        self.geographic_coords,
        starting=istart,
    )

    arr = []
    c = []
    # NOTE(review): cd is keyed by particle id while epdict appears to be
    # keyed by node number — confirm the keys agree upstream.
    for node, epl in sorted(epdict.items()):
        c.append(cd[node])
        for xy in epl:
            arr.append(xy)

    arr = np.array(arr)
    sp = ax.scatter(arr[:, 0], arr[:, 1], c=c, s=s, **kwargs)

    # add a colorbar for travel times
    if createcb:
        cb = plt.colorbar(sp, ax=ax, shrink=shrink)
        cb.set_label(colorbar_label)
    return sp
def get_grid_line_collection(self, **kwargs):
    """
    Get a PatchCollection of the grid cell outlines.

    Despite the method name (kept for API compatibility), the cached
    cross-section polygons are returned as a
    matplotlib.collections.PatchCollection, not a LineCollection.

    Parameters
    ----------
    **kwargs : dictionary
        keyword arguments passed to
        matplotlib.collections.PatchCollection; several aliases for the
        edge color ("colors", "color", "ec", "edgecolor") and face color
        ("facecolor", "fc") are accepted.

    Returns
    -------
    patches : matplotlib.collections.PatchCollection or None
        None when the cross section intersects no cells.
    """
    from matplotlib.collections import PatchCollection

    # Accept the common aliases; last one present wins.
    edgecolor = kwargs.pop("colors", "grey")
    edgecolor = kwargs.pop("color", edgecolor)
    edgecolor = kwargs.pop("ec", edgecolor)
    edgecolor = kwargs.pop("edgecolor", edgecolor)
    facecolor = kwargs.pop("facecolor", "none")
    facecolor = kwargs.pop("fc", facecolor)

    # Flatten {node: [poly, ...]} in node order into one patch list.
    polygons = [
        p for _, polys in sorted(self.polygons.items()) for p in polys
    ]
    if len(polygons) > 0:
        patches = PatchCollection(
            polygons, edgecolor=edgecolor, facecolor=facecolor, **kwargs
        )
    else:
        patches = None
    return patches
def set_zpts(self, vs):
    """
    Get an array of projected vertices corrected with corrected
    elevations based on minimum of cell elevation (self.elev) or
    passed vs numpy.ndarray

    Parameters
    ----------
    vs : numpy.ndarray or None
        Two-dimensional array to plot; None uses cell top elevations.

    Returns
    -------
    projpts : dict
        node number -> list of (distance-or-coordinate, elevation)
        vertices (top pair followed by bottom pair per segment).
    """
    # make vertex array based on projection direction
    if vs is not None:
        if not isinstance(vs, np.ndarray):
            vs = np.array(vs)

    # Which coordinate of the last vertex to sort segments by.
    if self.direction == "x":
        xyix = 0
    else:
        xyix = -1

    projpts = {}

    # nlay includes confining beds (ncb); inactive "layers" are skipped.
    nlay = self.mg.nlay + self.ncb
    nodeskip = self.mg.cross_section_nodeskip(nlay, self.xypts)

    cbcnt = 0
    for k in range(1, nlay + 1):
        if not self.active[k - 1]:
            cbcnt += 1
            continue

        # Remap the loop index to grid indices accounting for skipped beds.
        k, ns, ncbnn = self.mg.cross_section_adjust_indicies(k - 1, cbcnt)
        top = self.elev[k - 1, :]
        botm = self.elev[k, :]
        d0 = 0

        # trap to split multipolygons: segments with more than two
        # vertices are broken into consecutive vertex pairs.
        xypts = []
        for nn, verts in self.xypts.items():
            if nn in nodeskip[ns - 1]:
                continue
            if len(verts) > 2:
                i0 = 2
                for ix in range(len(verts)):
                    if ix == i0 - 1:
                        xypts.append((nn, verts[i0 - 2 : i0]))
                        i0 += 2
            else:
                xypts.append((nn, verts))

        # Order segments along the projection axis; "y" sections run
        # in the opposite direction.
        xypts = sorted(xypts, key=lambda q: q[-1][xyix][xyix])
        if self.direction == "y":
            xypts = xypts[::-1]

        for nn, verts in xypts:
            # Choose the top elevation: the supplied array (e.g. heads)
            # clamped between cell bottom and cell top; -1e30 is the
            # no-data sentinel — TODO confirm against caller convention.
            if vs is None:
                t = top[nn]
            else:
                t = vs[nn + ncbnn]
                if np.isclose(t, -1e30):
                    t = botm[nn]
            if t < botm[nn]:
                t = botm[nn]
            if top[nn] < t:
                t = top[nn]
            b = botm[nn]

            if self.geographic_coords:
                # Keep real-world coordinates along the section.
                if self.direction == "x":
                    projt = [(v[0], t) for v in verts]
                    projb = [(v[0], b) for v in verts]
                else:
                    projt = [(v[1], t) for v in verts]
                    projb = [(v[1], b) for v in verts]
            else:
                # Accumulate distance along the section line instead.
                verts = np.array(verts).T
                a2 = (np.max(verts[0]) - np.min(verts[0])) ** 2
                b2 = (np.max(verts[1]) - np.min(verts[1])) ** 2
                c = np.sqrt(a2 + b2)
                d1 = d0 + c
                projt = [(d0, t), (d1, t)]
                projb = [(d0, b), (d1, b)]
                d0 += c

            projpt = projt + projb
            node = nn + ncbnn
            if node not in projpts:
                projpts[node] = projpt
            else:
                projpts[node] += projpt

    return projpts
def set_zcentergrid(self, vs, kstep=1):
    """
    Get an array of z elevations at the center of a cell that is based
    on minimum of cell top elevation (self.elev) or passed vs numpy.ndarray

    Parameters
    ----------
    vs : numpy.ndarray
        Three-dimensional array to plot.
    kstep : int
        plotting layer interval

    Returns
    -------
    zcenters : list of float
        mean elevation of each projected cell, in node order, for
        every kstep-th layer.
    """
    verts = self.set_zpts(vs)
    # Mean of the elevation column of each cell's projected vertices;
    # i // self._ncpl recovers the layer index from the node number.
    zcenters = [
        np.mean(np.array(v).T[1])
        for i, v in sorted(verts.items())
        if (i // self._ncpl) % kstep == 0
    ]
    return zcenters
def get_grid_patch_collection(
    self, plotarray, projpts=None, fill_between=False, **kwargs
):
    """
    Get a PatchCollection of plotarray in unmasked cells

    Parameters
    ----------
    plotarray : numpy.ndarray
        One-dimensional array to attach to the Patch Collection.
    projpts : dict
        dictionary defined by node number which contains model
        patch vertices. None uses the cached self.polygons.
    fill_between : bool
        flag to create polygons that mimic the matplotlib fill_between
        method. Only used by the plot_fill_between method.
    **kwargs : dictionary
        keyword arguments passed to matplotlib.collections.PatchCollection

    Returns
    -------
    patches : matplotlib.collections.PatchCollection or None
    """
    from matplotlib.patches import Polygon
    from matplotlib.collections import PatchCollection

    # With no explicit projpts, reuse the pre-built (already ordered)
    # Polygon cache and skip vertex post-processing below.
    use_cache = False
    if projpts is None:
        use_cache = True
        projpts = self.polygons

    vmin = kwargs.pop("vmin", None)
    vmax = kwargs.pop("vmax", None)

    match_original = False
    if fill_between:
        match_original = True
        colors = kwargs.pop("colors")

    rectcol = []
    data = []
    for cell, poly in sorted(projpts.items()):
        if not use_cache:
            if len(poly) > 4:
                # multipolygon instance: split the flat vertex list
                # into groups of 4 (top pair + bottom pair).
                n = 0
                p = []
                polys = []
                for vn, v in enumerate(poly):
                    if vn == 3 + 4 * n:
                        n += 1
                        p.append(v)
                        polys.append(p)
                        p = []
                    else:
                        p.append(v)
            else:
                polys = [poly]
        else:
            polys = poly
        for polygon in polys:
            if not use_cache:
                # Sort the vertices counter-clockwise about the centroid.
                polygon = plotutil.UnstructuredPlotUtilities.arctan2(
                    np.array(polygon)
                )
            # Skip masked / NaN cells entirely.
            if np.isnan(plotarray[cell]):
                continue
            elif plotarray[cell] is np.ma.masked:
                continue
            if use_cache:
                rectcol.append(polygon)
            elif fill_between:
                # Split the cell into an upper and a lower rectangle at
                # the (clamped) plotarray value.
                x = list(set(np.array(polygon).T[0]))
                y1 = np.max(np.array(polygon).T[1])
                y = np.min(np.array(polygon).T[1])
                v = plotarray[cell]
                # NOTE(review): clamping v > y1 to the *bottom* (y)
                # looks suspicious — presumably should be y1; confirm
                # against plot_fill_between usage.
                if v > y1:
                    v = y
                if v < y:
                    v = y
                p1 = [(x[0], y1), (x[1], y1), (x[1], v), (x[0], v)]
                p2 = [(x[0], v), (x[1], v), (x[1], y), (x[0], y)]
                rectcol.append(Polygon(p1, closed=True, color=colors[0]))
                rectcol.append(Polygon(p2, closed=True, color=colors[1]))
            else:
                rectcol.append(Polygon(polygon, closed=True))
            data.append(plotarray[cell])

    if len(rectcol) > 0:
        patches = PatchCollection(rectcol, match_original, **kwargs)
        if not fill_between:
            patches.set_array(np.array(data))
            patches.set_clim(vmin, vmax)
    else:
        patches = None
    return patches
| 31.515354
| 79
| 0.502245
|
06f0ea901eb9d0c87cc6b5f8a0f42075d0b79213
| 6,044
|
py
|
Python
|
wbia/viz/interact/interact_qres.py
|
WildMeOrg/wildbook-ia
|
a18d57611e5936bea02a964716466e062415aa1a
|
[
"Apache-2.0"
] | 20
|
2021-01-19T23:17:21.000Z
|
2022-03-21T10:25:56.000Z
|
wbia/viz/interact/interact_qres.py
|
solomonkimunyu/wildbook-ia
|
ac433d4f2a47b1d905c421a36c497f787003afc3
|
[
"Apache-2.0"
] | 16
|
2021-01-28T23:05:29.000Z
|
2022-03-31T20:39:36.000Z
|
wbia/viz/interact/interact_qres.py
|
solomonkimunyu/wildbook-ia
|
ac433d4f2a47b1d905c421a36c497f787003afc3
|
[
"Apache-2.0"
] | 9
|
2021-02-13T20:19:46.000Z
|
2022-03-29T10:47:11.000Z
|
# -*- coding: utf-8 -*-
import logging
import utool as ut
import wbia.plottool as pt
from wbia.plottool import plot_helpers as ph
from wbia.plottool import abstract_interaction
from wbia import viz
from wbia.viz.interact.interact_sver import ishow_sver
# utool injection provides the module-level print/rrr/profile helpers.
(print, rrr, profile) = ut.inject2(__name__, '[interact_qres]')
logger = logging.getLogger('wbia')
def ishow_analysis(ibs, cm, qreq_=None, **kwargs):
    """
    Launch an interactive "analysis" view of a chip-match result.

    Returns the live InteractQres object (not a figure, despite the
    ``fig`` name used in the doctests).

    CommandLine:
        python -m wbia.viz.interact.interact_qres --test-ishow_analysis:0 --show
        python -m wbia.viz.interact.interact_qres --test-ishow_analysis:1 --show

    Example:
        >>> # SLOW_DOCTEST
        >>> from wbia.viz.interact.interact_qres import *  # NOQA
        >>> import wbia
        >>> cm, qreq_ = wbia.testdata_cm()
        >>> fig = ishow_analysis(qreq_.ibs, cm, qreq_=qreq_)
        >>> pt.show_if_requested()

    Example:
        >>> # DISABLE_DOCTEST
        >>> from wbia.viz.interact.interact_qres import *  # NOQA
        >>> import wbia
        >>> cm, qreq_ = wbia.testdata_cm()
        >>> fig = ishow_analysis(qreq_.ibs, cm, qreq_=qreq_)
        >>> pt.show_if_requested()
    """
    interact = InteractQres(ibs, cm, analysis=True, qreq_=qreq_, **kwargs)
    interact.show_page()
    interact.show()
    return interact
# Indirection so the base class can be swapped in one place (e.g. during
# module reloading) — NOTE(review): presumably a development convenience.
BASE_CLASS = abstract_interaction.AbstractInteraction
class InteractQres(BASE_CLASS):
    """
    Displays query chip, groundtruth matches, and top matches

    THERE IS A DIFFERENCE BETWEEN THIS AND MATCH INTERACTION. THIS IS FOR
    DISPLAYING THE RANKED LIST MATCH INTERACTION IS LOOKING AT A SINGLE PAIR

    SeeAlso:
        #interact_matches.MatchInteraction2
        #wbia.viz.interact.MatchInteraction
    """

    def __init__(self, ibs, cm, analysis=False, qreq_=None, **kwargs):
        self.ibs = ibs          # controller/database handle
        self.cm = cm            # the ChipMatch result being displayed
        self.analysis = analysis  # True -> analysis view, False -> ranked list
        self.qreq_ = qreq_      # query request used to produce cm
        # Copy so later mutation (annot_mode/fnum) does not leak to callers.
        self.kwargs = kwargs.copy()
        self.verbose = True
        super(InteractQres, self).__init__(**kwargs)
        # NOTE(review): bare attribute access — presumably forces the base
        # class to lazily allocate a figure number; confirm in BASE_CLASS.
        self.fnum
        logger.info('self.fnum = %r' % (self.fnum,))

    def plot(self, *args, **kwargs):
        # Entry point called by the abstract-interaction page machinery.
        if self.analysis:
            self._analysis_view(toggle=1)
        else:
            self._top_matches_view(toggle=1)

    def _top_matches_view(self, toggle=0):
        # Toggle if the click is not in any axis
        self.kwargs['annot_mode'] = self.kwargs.get('annot_mode', 0) + toggle
        self.kwargs['fnum'] = self.fnum
        fig = viz.show_qres(self.ibs, self.cm, qreq_=self.qreq_, **self.kwargs)
        return fig

    def _analysis_view(self, toggle=0):
        # Toggle if the click is not in any axis
        if self.verbose:
            logger.info('clicked none')
        self.kwargs['annot_mode'] = self.kwargs.get('annot_mode', 0) + toggle
        self.kwargs['fnum'] = self.fnum
        # if isinstance(self.cm, chip_match.ChipMatch):
        fig = self.cm.show_analysis(self.qreq_, **self.kwargs)
        # else:
        #     fig = self.cm.show_analysis(self.ibs, qreq_=self.qreq_, **self.kwargs)
        self.draw()
        return fig

    def show_sver_process_to_aid(self, aid2):
        """Open a spatial-verification view for the pair (qaid, aid2)."""
        if self.verbose:
            logger.info('ctrl+clicked aid2=%r' % aid2)
        fnum_ = pt.next_fnum()
        ishow_sver(self.ibs, self.cm.qaid, aid2, qreq_=self.qreq_, fnum=fnum_)
        self.draw()
        self.bring_to_front()

    def show_matches_to_aid(self, aid2):
        """Open the single-pair match view for (qaid, aid2)."""
        if self.verbose:
            logger.info('clicked aid2=%r' % aid2)
        fnum_ = pt.next_fnum()
        # if isinstance(self.cm, chip_match.ChipMatch):
        self.cm.ishow_match(self.qreq_, aid2, fnum=fnum_)
        # else:
        #     self.cm.ishow_matches(self.ibs, aid2, qreq_=self.qreq_, fnum=fnum_)
        self.draw()
        # self.bring_to_front()
        # fig = pt.gcf()
        # fig.canvas.draw()
        # pt.bring_to_front(fig)

    def on_click_outside(self, event):
        # Clicking outside any axes redraws the whole page.
        self.show_page()

    def on_click_inside(self, event, ax):
        ax = event.inaxes
        viztype = ph.get_plotdat(ax, 'viztype', '')
        # if verbose:
        #     logger.info(str(event.__dict__))
        logger.info('viztype=%r' % viztype)
        # Clicked a specific matches
        logger.info('plodat_dict = ' + ut.repr2(ph.get_plotdat_dict(ax)))
        if viztype.startswith('chip'):
            from wbia.viz.interact import interact_chip

            options = interact_chip.build_annot_context_options(
                self.ibs,
                self.cm.qaid,
                refresh_func=self._analysis_view,
                with_interact_chip=False,
            )
            self.show_popup_menu(options, event)
        if viztype.startswith('matches') or viztype == 'multi_match':  # why startswith?
            aid2 = ph.get_plotdat(ax, 'aid2', None)
            aid_list = ph.get_plotdat(ax, 'aid_list', None)
            if event.button == 3:  # right-click
                # TODO; this functionality should be in viz.interact
                from wbia.gui import inspect_gui

                logger.info('right click')
                logger.info('qreq_ = %r' % (self.qreq_,))
                options = inspect_gui.get_aidpair_context_menu_options(
                    self.ibs,
                    self.cm.qaid,
                    aid2,
                    self.cm,
                    qreq_=self.qreq_,
                    update_callback=self.show_page,
                    backend_callback=None,
                    aid_list=aid_list,
                )
                self.show_popup_menu(options, event)
            else:
                # Ctrl-Click
                key = '' if event.key is None else event.key
                logger.info('key = %r' % key)
                if key.find('control') == 0:
                    logger.info('[viz] result control clicked')
                    self.show_sver_process_to_aid(aid2)
                # Left-Click
                else:
                    logger.info('[viz] result clicked')
                    self.show_matches_to_aid(aid2)
        self.draw()
| 35.345029
| 88
| 0.58405
|
a8488f5e4727af056052b884ad58eaed19ccaa8b
| 3,934
|
py
|
Python
|
tests/addons/test_config.py
|
carver7/supervisor-master
|
e9802f92c9f77481276ed3c0d524427cc03e4271
|
[
"Apache-2.0"
] | null | null | null |
tests/addons/test_config.py
|
carver7/supervisor-master
|
e9802f92c9f77481276ed3c0d524427cc03e4271
|
[
"Apache-2.0"
] | null | null | null |
tests/addons/test_config.py
|
carver7/supervisor-master
|
e9802f92c9f77481276ed3c0d524427cc03e4271
|
[
"Apache-2.0"
] | null | null | null |
"""Validate Add-on configs."""
import pytest
import voluptuous as vol
from supervisor.addons import validate as vd
from ..common import load_json_fixture
def test_basic_config():
    """Validate basic config and check the default values."""
    config = load_json_fixture("basic-addon-config.json")

    valid_config = vd.SCHEMA_ADDON_CONFIG(config)

    assert valid_config["name"] == "Test Add-on"
    assert valid_config["image"] == "test/{arch}-my-custom-addon"

    # Check defaults: every host/API privilege must be opt-in (off).
    assert not valid_config["host_network"]
    assert not valid_config["host_ipc"]
    assert not valid_config["host_dbus"]
    assert not valid_config["host_pid"]

    assert not valid_config["hassio_api"]
    assert not valid_config["homeassistant_api"]
    assert not valid_config["docker_api"]
def test_invalid_repository():
    """Validate basic config with invalid repositories."""
    config = load_json_fixture("basic-addon-config.json")

    # Bare name without a repository component is rejected.
    config["image"] = "something"
    with pytest.raises(vol.Invalid):
        vd.SCHEMA_ADDON_CONFIG(config)

    # Explicit tags are not allowed in the image reference.
    config["image"] = "homeassistant/no-valid-repo:no-tag-allow"
    with pytest.raises(vol.Invalid):
        vd.SCHEMA_ADDON_CONFIG(config)

    # Same rule applies to custom registries.
    config[
        "image"
    ] = "registry.gitlab.com/company/add-ons/test-example/text-example:no-tag-allow"
    with pytest.raises(vol.Invalid):
        vd.SCHEMA_ADDON_CONFIG(config)
def test_valid_repository():
    """Validate basic config with different valid repositories."""
    config = load_json_fixture("basic-addon-config.json")

    # A tagless custom-registry reference passes and is kept verbatim.
    custom_registry = "registry.gitlab.com/company/add-ons/core/test-example"
    config["image"] = custom_registry
    valid_config = vd.SCHEMA_ADDON_CONFIG(config)
    assert valid_config["image"] == custom_registry
def test_valid_map():
    """Validate basic config with different valid maps."""
    config = load_json_fixture("basic-addon-config.json")

    # Folders may carry an access suffix (:rw/:ro) or none at all.
    config["map"] = ["backup:rw", "ssl:ro", "config"]
    vd.SCHEMA_ADDON_CONFIG(config)
def test_valid_basic_build():
    """Validate basic build config."""
    config = load_json_fixture("basic-build-config.json")

    vd.SCHEMA_BUILD_CONFIG(config)
def test_valid_machine():
    """Validate valid machine config.

    Exercises three accepted forms: every known machine, every machine
    negated ("!name"), and a mixed positive/negative list.
    """
    config = load_json_fixture("basic-addon-config.json")

    known_machines = [
        "intel-nuc",
        "odroid-c2",
        "odroid-n2",
        "odroid-xu",
        "qemuarm-64",
        "qemuarm",
        "qemux86-64",
        "qemux86",
        "raspberrypi",
        "raspberrypi2",
        "raspberrypi3-64",
        "raspberrypi3",
        "raspberrypi4-64",
        "raspberrypi4",
        "tinker",
    ]
    mixed_machines = [
        "odroid-n2",
        "!odroid-xu",
        "qemuarm-64",
        "!qemuarm",
        "qemux86-64",
        "qemux86",
        "raspberrypi",
        "raspberrypi4-64",
        "raspberrypi4",
        "!tinker",
    ]

    variants = (
        known_machines,
        ["!" + machine for machine in known_machines],
        mixed_machines,
    )
    for machine_list in variants:
        config["machine"] = machine_list
        assert vd.SCHEMA_ADDON_CONFIG(config)
def test_invalid_machine():
    """Validate invalid machine config.

    Both an unknown machine name and a duplicated entry must be
    rejected by the schema.
    """
    config = load_json_fixture("basic-addon-config.json")

    invalid_lists = (
        # "tinkerxy" is not a known machine type.
        [
            "intel-nuc",
            "raspberrypi3",
            "raspberrypi4-64",
            "raspberrypi4",
            "tinkerxy",
        ],
        # Duplicate entries are not allowed.
        [
            "intel-nuc",
            "intel-nuc",
        ],
    )
    for machine_list in invalid_lists:
        config["machine"] = machine_list
        with pytest.raises(vol.Invalid):
            assert vd.SCHEMA_ADDON_CONFIG(config)
| 25.057325
| 84
| 0.624555
|
e46778e2334b0fb6582c208a92a587c1a93db058
| 2,884
|
py
|
Python
|
linear_classifier.py
|
ashwanikumar04/udacity-mlnd-capstone
|
f4b067b9f950f5b2d1763b808d296903345577a0
|
[
"MIT"
] | 1
|
2019-07-15T17:08:49.000Z
|
2019-07-15T17:08:49.000Z
|
linear_classifier.py
|
ashwanikumar04/udacity-mlnd-capstone
|
f4b067b9f950f5b2d1763b808d296903345577a0
|
[
"MIT"
] | null | null | null |
linear_classifier.py
|
ashwanikumar04/udacity-mlnd-capstone
|
f4b067b9f950f5b2d1763b808d296903345577a0
|
[
"MIT"
] | 1
|
2020-01-10T05:16:40.000Z
|
2020-01-10T05:16:40.000Z
|
import tensorflow as tf
from helpers import one_hot_encode, get_batch, get_training_set, get_test_set, log
from sklearn.utils import shuffle
class LinearClassifer:
    """Softmax linear classifier (y = xW + b) trained with SGD on TF1.

    NOTE(review): the class name is missing an "i" (Classifer) but is
    kept because external callers reference it.
    """

    def __init__(self, params, labels, image_size):
        # params: object with learning_rate, epoch, num_batches,
        #         batch_size and epoch_to_report attributes.
        self.params = params
        # labels: number of output classes.
        self.labels = labels
        # image_size: number of input features (flattened pixels).
        self.image_size = image_size

    def run(self, train_X, train_y, test_X, test_y, validate_X, validate_y):
        """Build the graph, train, and return accuracy/loss traces.

        Returns (accuracyDictionary, loss_trace, train_acc, test_acc)
        where accuracyDictionary maps reported epoch -> test accuracy.
        """
        accuracyDictionary = {}
        # Placeholders and trainable parameters of the linear model.
        x = tf.placeholder(tf.float32, shape=[None, self.image_size])
        W = tf.Variable(tf.zeros([self.image_size, self.labels]))
        b = tf.Variable(tf.zeros([self.labels]))
        y = tf.matmul(x, W) + b
        y_true = tf.placeholder(tf.float32, [None, self.labels])
        loss = tf.reduce_mean(
            tf.nn.softmax_cross_entropy_with_logits_v2(labels=y_true, logits=y))
        optimizer = tf.train.GradientDescentOptimizer(
            learning_rate=self.params.learning_rate)
        goal = optimizer.minimize(loss)
        init = tf.global_variables_initializer()
        correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_true, 1))
        accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
        loss_trace = []
        train_acc = []
        test_acc = []
        with tf.Session() as sess:
            sess.run(init)
            for step in range(self.params.epoch):
                # NOTE(review): this rebinds the Python name `y` (the
                # logits tensor) to the shuffled labels; harmless here
                # because all dependent ops were built above, but fragile.
                X, y = shuffle(train_X, train_y)
                for current_batch in range(self.params.num_batches):
                    batch_X, batch_y = get_batch(
                        X, y, current_batch, self.params.batch_size)
                    sess.run(goal, feed_dict={x: batch_X, y_true: batch_y})
                if step % self.params.epoch_to_report == 0:
                    log(step, "Epoch")
                    # Loss on the last mini-batch of the epoch only.
                    temp_loss = sess.run(
                        loss, feed_dict={x: batch_X, y_true: batch_y})
                    # convert into a matrix, and the shape of the placeholder to correspond
                    temp_train_acc = sess.run(
                        accuracy, feed_dict={x: train_X, y_true: train_y})
                    temp_test_acc = sess.run(accuracy, feed_dict={
                        x: test_X, y_true: test_y})
                    # recode the result
                    loss_trace.append(temp_loss)
                    train_acc.append(temp_train_acc)
                    test_acc.append(temp_test_acc)
                    accuracyDictionary[step] = sess.run(accuracy, feed_dict={x: test_X,
                                                                             y_true: test_y})
                    log(accuracyDictionary[step], "model accuracy")
            log(sess.run(accuracy, feed_dict={x: validate_X,
                                              y_true: validate_y}), "Final accuracy")
        return accuracyDictionary, loss_trace, train_acc, test_acc
| 49.724138
| 93
| 0.564147
|
b8728af35d6022f1ab9036d8162c2f4d90db326f
| 6,434
|
py
|
Python
|
contrib/pyminer/pyminer.py
|
TimMaylon/corecoin
|
650b4829e81e51110f0abf13bba0e77d73eb1c07
|
[
"MIT"
] | null | null | null |
contrib/pyminer/pyminer.py
|
TimMaylon/corecoin
|
650b4829e81e51110f0abf13bba0e77d73eb1c07
|
[
"MIT"
] | null | null | null |
contrib/pyminer/pyminer.py
|
TimMaylon/corecoin
|
650b4829e81e51110f0abf13bba0e77d73eb1c07
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
#
# Copyright (c) 2011 The Bitcoin developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
import time
import json
import pprint
import hashlib
import struct
import re
import base64
import httplib
import sys
from multiprocessing import Process
ERR_SLEEP = 15        # seconds to back off after a failed/short RPC reply
MAX_NONCE = 1000000L  # initial nonce search space per getwork request
settings = {}         # key=value pairs parsed from the config file
pp = pprint.PrettyPrinter(indent=4)
class BitcoinRPC:
    """Minimal JSON-RPC 1.1 client over HTTP basic auth (Python 2)."""

    # Monotonically increasing JSON-RPC request id.
    OBJID = 1

    def __init__(self, host, port, username, password):
        authpair = "%s:%s" % (username, password)
        self.authhdr = "Basic %s" % (base64.b64encode(authpair))
        # Blocking connection; 30 second timeout.
        self.conn = httplib.HTTPConnection(host, port, False, 30)

    def rpc(self, method, params=None):
        """POST one JSON-RPC call; return result, error object, or None."""
        self.OBJID += 1
        obj = { 'version' : '1.1',
            'method' : method,
            'id' : self.OBJID }
        if params is None:
            obj['params'] = []
        else:
            obj['params'] = params
        self.conn.request('POST', '/', json.dumps(obj),
            { 'Authorization' : self.authhdr,
              'Content-type' : 'application/json' })

        resp = self.conn.getresponse()
        if resp is None:
            print "JSON-RPC: no response"
            return None

        body = resp.read()
        resp_obj = json.loads(body)
        if resp_obj is None:
            print "JSON-RPC: cannot JSON-decode body"
            return None
        # A populated 'error' field is returned to the caller as-is.
        if 'error' in resp_obj and resp_obj['error'] != None:
            return resp_obj['error']
        if 'result' not in resp_obj:
            print "JSON-RPC: no result in object"
            return None

        return resp_obj['result']

    def getblockcount(self):
        return self.rpc('getblockcount')

    def getwork(self, data=None):
        # With data, submits a solved work unit; without, fetches new work.
        return self.rpc('getwork', data)
def uint32(x):
    """Truncate *x* to an unsigned 32-bit value.

    FIX: the old literal ``0xffffffffL`` is Python-2-only syntax
    (SyntaxError on Python 3); dropping the suffix is behavior-identical
    on Python 2, where ints auto-promote to long.
    """
    return x & 0xffffffff
def bytereverse(x):
    """Reverse the byte order of a 32-bit word (endianness swap).

    The final mask (formerly a call to ``uint32``, now inlined so the
    function is self-contained) truncates the shifted value to 32 bits.
    """
    swapped = ((x) << 24 | (((x) << 8) & 0x00ff0000) |
               (((x) >> 8) & 0x0000ff00) | ((x) >> 24))
    return swapped & 0xffffffff
def bufreverse(in_buf):
    """Byte-swap every 32-bit word of *in_buf* (length multiple of 4).

    FIX: the original joined struct-packed words with ``''.join``, which
    is a TypeError on Python 3 (str.join over bytes). Unpacking the
    buffer little-endian and repacking big-endian reverses the bytes of
    each word in one pass, works on Python 2 and 3, and still raises
    struct.error when the length is not a multiple of 4.
    """
    nwords = len(in_buf) // 4
    words = struct.unpack("<%dI" % nwords, in_buf)
    return struct.pack(">%dI" % nwords, *words)
def wordreverse(in_buf):
    """Reverse the order of the 32-bit words in *in_buf*.

    FIX: join with ``b''`` instead of ``''`` so the function also works
    on Python 3 bytes (identical behavior on Python 2, where str is
    bytes).
    """
    out_words = []
    for i in range(0, len(in_buf), 4):
        out_words.append(in_buf[i:i+4])
    out_words.reverse()
    return b''.join(out_words)
class Miner:
    """One CPU mining worker: fetch work, scan nonces, submit solutions."""

    def __init__(self, id):
        self.id = id
        # Adaptively resized each iteration to fit 'scantime' seconds.
        self.max_nonce = MAX_NONCE

    def work(self, datastr, targetstr):
        """Scan nonces for one getwork unit.

        Returns (hashes_done, nonce_bin) where nonce_bin is the packed
        winning nonce or None if the range was exhausted.
        """
        # decode work data hex string to binary
        static_data = datastr.decode('hex')
        static_data = bufreverse(static_data)

        # the first 76b of 80b do not change
        blk_hdr = static_data[:76]

        # decode 256-bit target value
        targetbin = targetstr.decode('hex')
        targetbin = targetbin[::-1]    # byte-swap and dword-swap
        targetbin_str = targetbin.encode('hex')
        target = long(targetbin_str, 16)

        # pre-hash first 76b of block header
        static_hash = hashlib.sha256()
        static_hash.update(blk_hdr)

        for nonce in xrange(self.max_nonce):

            # encode 32-bit nonce value
            nonce_bin = struct.pack("<I", nonce)

            # hash final 4b, the nonce value
            hash1_o = static_hash.copy()
            hash1_o.update(nonce_bin)
            hash1 = hash1_o.digest()

            # sha256 hash of sha256 hash
            hash_o = hashlib.sha256()
            hash_o.update(hash1)
            hash = hash_o.digest()

            # quick test for winning solution: high 32 bits zero?
            if hash[-4:] != '\0\0\0\0':
                continue

            # convert binary hash to 256-bit Python long
            hash = bufreverse(hash)
            hash = wordreverse(hash)

            hash_str = hash.encode('hex')
            l = long(hash_str, 16)

            # proof-of-work test:  hash < target
            if l < target:
                print time.asctime(), "PROOF-OF-WORK found: %064x" % (l,)
                return (nonce + 1, nonce_bin)
            else:
                print time.asctime(), "PROOF-OF-WORK false positive %064x" % (l,)
#                return (nonce + 1, nonce_bin)

        return (nonce + 1, None)

    def submit_work(self, rpc, original_data, nonce_bin):
        """Splice the winning nonce back into the work and getwork() it."""
        nonce_bin = bufreverse(nonce_bin)
        nonce = nonce_bin.encode('hex')
        # Nonce occupies hex chars 152-160 of the 256-char data field.
        solution = original_data[:152] + nonce + original_data[160:256]
        param_arr = [ solution ]
        result = rpc.getwork(param_arr)
        print time.asctime(), "--> Upstream RPC result:", result

    def iterate(self, rpc):
        """One fetch/scan/submit cycle; sleeps on malformed replies."""
        work = rpc.getwork()
        if work is None:
            time.sleep(ERR_SLEEP)
            return
        if 'data' not in work or 'target' not in work:
            time.sleep(ERR_SLEEP)
            return

        time_start = time.time()

        (hashes_done, nonce_bin) = self.work(work['data'],
                                             work['target'])

        time_end = time.time()
        time_diff = time_end - time_start

        # Rescale the nonce range so the next scan takes ~scantime seconds.
        self.max_nonce = long(
            (hashes_done * settings['scantime']) / time_diff)
        if self.max_nonce > 0xfffffffaL:
            self.max_nonce = 0xfffffffaL

        if settings['hashmeter']:
            print "HashMeter(%d): %d hashes, %.2f Khash/sec" % (
                  self.id, hashes_done,
                  (hashes_done / 1000.0) / time_diff)

        if nonce_bin is not None:
            self.submit_work(rpc, work['data'], nonce_bin)

    def loop(self):
        """Connect to the RPC server and mine forever."""
        rpc = BitcoinRPC(settings['host'], settings['port'],
                         settings['rpcuser'], settings['rpcpass'])
        if rpc is None:
            return

        while True:
            self.iterate(rpc)
def miner_thread(id):
    # Process entry point: each worker process runs one Miner loop.
    miner = Miner(id)
    miner.loop()
if __name__ == '__main__':
    if len(sys.argv) != 2:
        print "Usage: pyminer.py CONFIG-FILE"
        sys.exit(1)

    # Parse the simple key=value config file into the global settings.
    f = open(sys.argv[1])
    for line in f:
        # skip comment lines
        m = re.search('^\s*#', line)
        if m:
            continue

        # parse key=value lines
        m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
        if m is None:
            continue
        settings[m.group(1)] = m.group(2)
    f.close()

    # Fill in defaults for anything the config did not set.
    if 'host' not in settings:
        settings['host'] = '127.0.0.1'
    if 'port' not in settings:
        settings['port'] = 4496
    if 'threads' not in settings:
        settings['threads'] = 1
    if 'hashmeter' not in settings:
        settings['hashmeter'] = 0
    if 'scantime' not in settings:
        settings['scantime'] = 30L
    if 'rpcuser' not in settings or 'rpcpass' not in settings:
        print "Missing username and/or password in cfg file"
        sys.exit(1)

    # Values parsed from the file are strings; coerce the numeric ones.
    settings['port'] = int(settings['port'])
    settings['threads'] = int(settings['threads'])
    settings['hashmeter'] = int(settings['hashmeter'])
    settings['scantime'] = long(settings['scantime'])

    # Spawn one worker process per configured thread.
    thr_list = []
    for thr_id in range(settings['threads']):
        p = Process(target=miner_thread, args=(thr_id,))
        p.start()
        thr_list.append(p)
        time.sleep(1)			# stagger threads

    print settings['threads'], "mining threads started"

    print time.asctime(), "Miner Starts - %s:%s" % (settings['host'], settings['port'])
    try:
        for thr_proc in thr_list:
            thr_proc.join()
    except KeyboardInterrupt:
        pass
    print time.asctime(), "Miner Stops - %s:%s" % (settings['host'], settings['port'])
| 25.43083
| 84
| 0.664905
|
f4b1987679486f5e35154e82baf63a30d1a703d0
| 6,906
|
py
|
Python
|
nimbus/fabnet/asyncio_rpc.py
|
fabregas/nimbusfs-node
|
7af3ecc14f78526b477ed29fb9e9b9eb972d6b4e
|
[
"Apache-2.0"
] | null | null | null |
nimbus/fabnet/asyncio_rpc.py
|
fabregas/nimbusfs-node
|
7af3ecc14f78526b477ed29fb9e9b9eb972d6b4e
|
[
"Apache-2.0"
] | null | null | null |
nimbus/fabnet/asyncio_rpc.py
|
fabregas/nimbusfs-node
|
7af3ecc14f78526b477ed29fb9e9b9eb972d6b4e
|
[
"Apache-2.0"
] | null | null | null |
import asyncio
import random
import uuid
import sys
import traceback
import pickle
import inspect
from base64 import b64encode
from hashlib import sha1
from .utils import logger
class TimeoutException(Exception):
    """RPC call did not receive a reply in time.

    NOTE(review): not raised anywhere in this module — _timeout resolves
    the pending future with None instead (see commented-out line there).
    """
    pass
class RemoteError(Exception):
    """Exception raised on the caller side for an error reported by the peer."""
    pass
def serializable_error(msg):
    """Dump the active traceback to stderr and wrap *msg* in the
    picklable dict form recognized by check_remote_error()."""
    traceback.print_exc(file=sys.stderr)
    payload = {'__error__': str(msg)}
    return payload
def check_remote_error(obj):
    """Return a RemoteError if *obj* is an error dict from the peer,
    otherwise None."""
    if isinstance(obj, dict) and '__error__' in obj:
        return RemoteError(obj['__error__'])
    return None
# packet's markers: first wire byte distinguishes requests from responses,
# and every packet is terminated by the PM_END sentinel (b'BcB').
PM_REQUEST = 0
PM_RESPONSE = 1
PM_END = bytes([66, 99, 66])
class AbstractRPC(asyncio.Protocol):
    """Symmetric pickle-based RPC protocol.

    Wire format of every packet:
        1 marker byte (PM_REQUEST / PM_RESPONSE)
        20-byte SHA-1 message id
        pickled payload
        PM_END terminator

    Outgoing calls are synthesized by ``__getattribute__``: any unknown
    attribute becomes a coroutine that pickles ``[name, args, kwargs]``
    and sends it to the peer. Incoming requests are dispatched to local
    ``api_<name>`` methods. Subclasses provide the transport-specific
    ``_send_request`` / ``_send_response``.
    """

    def __init__(self, wait_response_time=5):
        # NOTE(review): super(asyncio.Protocol, self) skips
        # asyncio.Protocol in the MRO — unusual, kept as-is.
        super(asyncio.Protocol, self).__init__()
        # seconds to wait for a response before resolving with None
        self.wait_response_time = wait_response_time
        # msg_id -> (future, timeout handle) for calls awaiting replies
        self._outstanding = {}
        # reassembly buffer for partially received packets
        self.buf = bytes()

    def connection_made(self, transport):
        self.transport = transport

    def _accept_request(self, msg_id, data, address):
        """Dispatch an incoming request to the matching api_* method."""
        if not isinstance(data, list) or len(data) != 3:
            raise RuntimeError("Could not read packet: %s" % data)

        funcname, args, kwargs = data
        api_method = getattr(self, "api_%s" % funcname, None)
        if api_method is None or not callable(api_method):
            logger.info("%s has no callable method api_%s; ignoring request",
                        self.__class__.__name__, funcname)
            return

        @asyncio.coroutine
        def proc_request(address, *args):
            # Support both coroutine and plain api_* implementations.
            if inspect.isgeneratorfunction(api_method):
                ret = yield from api_method(address, *args)
                return ret
            else:
                return api_method(address, *args)

        def resp_func(task):
            try:
                resp = task.result()
            except Exception as err:
                resp = serializable_error(err)
            txdata = b'\x01' + msg_id + pickle.dumps(resp) + PM_END
            self._send_response(address, txdata)

        # FIX: asyncio.async(...) is a SyntaxError on Python >= 3.7
        # ("async" became a keyword); ensure_future is the documented
        # replacement, available since Python 3.4.4.
        resp = asyncio.ensure_future(proc_request(address, *args))
        if not kwargs.get('nowait', False):
            resp.add_done_callback(resp_func)

    def _accept_response(self, msg_id, data, address):
        """Resolve the outstanding future matching msg_id."""
        if msg_id not in self._outstanding:
            msgargs = (b64encode(msg_id), address)
            logger.info("received unknown message %s from %s; ignoring"
                        % msgargs)
            return
        future, timeout = self._outstanding[msg_id]
        timeout.cancel()
        # Error dicts from the peer become a RemoteError on the future.
        err = check_remote_error(data)
        if err:
            future.set_exception(err)
        else:
            future.set_result(data)
        del self._outstanding[msg_id]

    def __getattribute__(self, name):
        # Real attributes win; any unknown name becomes a remote call.
        try:
            return object.__getattribute__(self, name)
        except AttributeError:
            pass

        @asyncio.coroutine
        def func(address, *args, **kwargs):
            msg_id = sha1(str(random.getrandbits(255)).encode()).digest()
            data = pickle.dumps([name, args, kwargs])
            if len(data) > 8192:
                raise RuntimeError('RPC message is too long! Max is 8K')
            txdata = b'\x00' + msg_id + data + PM_END
            isok = yield from self._send_request(address, txdata)
            if kwargs.get('nowait', False):
                return isok
            if isok is False:
                # Transport failure: hand back an already-resolved future.
                future = asyncio.Future()
                future.set_result(None)
                return future

            loop = asyncio.get_event_loop()
            timeout = loop.call_later(self.wait_response_time,
                                      self._timeout, msg_id)
            future = asyncio.Future()
            self._outstanding[msg_id] = (future, timeout)
            ret = yield from future
            return ret
        return func

    def _send_response(self, address, data):
        raise RuntimeError('not implemented')

    def _send_request(self, address, txdata):
        raise RuntimeError('not implemented')

    def _timeout(self, msg_id):
        """Give up on msg_id: resolve its future with None."""
        args = (b64encode(msg_id), self.wait_response_time)
        logger.info("Did not received reply for msg id %s within %i seconds"
                    % args)
        # self._outstanding[msg_id][0].set_exception(TimeoutException())
        self._outstanding[msg_id][0].set_result(None)
        del self._outstanding[msg_id]

    def _on_received(self, data, addr):
        """Reassemble PM_END-delimited packets and dispatch each one."""
        if self.buf:
            data = self.buf + data
        while True:
            found = data.find(PM_END)
            if found == -1:
                # FIX: was "self.buf += data", which duplicated any bytes
                # already buffered (they were prepended to `data` above).
                self.buf = data
                return
            self.buf = data[found+3:]
            datagram = data[:found]

            msg_id = datagram[1:21]
            data = pickle.loads(datagram[21:])

            if datagram[0] == PM_REQUEST:
                self._accept_request(msg_id, data, addr)
            elif datagram[0] == PM_RESPONSE:
                self._accept_response(msg_id, data, addr)
            else:
                logger.info("Received unknown message from %s, ignoring",
                            repr(addr))
            data = self.buf
class UDPRPC(AbstractRPC):
    """RPC transport over UDP datagrams."""

    def datagram_received(self, data, addr):
        # asyncio DatagramProtocol callback: feed raw bytes to the framer.
        self._on_received(data, addr)

    @asyncio.coroutine
    def _send_request(self, address, data):
        # sendto does not block; implicit success (returns None).
        self.transport.sendto(data, address)

    def _send_response(self, address, data):
        self.transport.sendto(data, address)
class TCPRPC(AbstractRPC):
    """RPC transport over TCP with a per-address connection cache."""

    def __init__(self, wait_response_time=5):
        super().__init__(wait_response_time)
        # address -> transport, or a Future while the connect is pending
        # so concurrent callers share one connection attempt.
        self.connections = {}
        self.buf = bytes()
        # NOTE(review): not referenced anywhere visible — confirm intent.
        self._keep_alive_timeout = 30

    def data_received(self, data):
        addr = self.transport.get_extra_info('peername')
        self._on_received(data, addr)

    def connection_lost(self, exc):
        logger.info('connection is lost (%s)' % exc)

    @asyncio.coroutine
    def __connect(self, address):
        loop = asyncio.get_event_loop()
        transport, protocol = yield from \
            loop.create_connection(lambda: self,
                                   address[0], address[1])
        return transport

    @asyncio.coroutine
    def _send_request(self, address, data):
        transport = self.connections.get(address, None)
        try:
            if transport is None:
                # First caller: park a Future so others wait on it.
                self.connections[address] = asyncio.Future()
                transport = yield from self.__connect(address)
                self.connections[address].set_result(transport)
                self.connections[address] = transport
            elif isinstance(transport, asyncio.Future):
                # Connection in progress: wait for the first caller.
                transport = yield from transport
        except OSError as err:
            # Connect failed: drop the cache entry and signal failure.
            del self.connections[address]
            logger.info(err)
            return False

        transport.write(data)

    def _send_response(self, address, data):
        self.transport.write(data)
| 77
| 0.589343
|
3b66cf9721c9e0b3fef293dbc2e5d11cff77662e
| 1,011
|
py
|
Python
|
tests/test_str.py
|
uit-cosmo/2d-propagating-blobs
|
2c19458a5ba6d0d138461fadf3e935273bee4b5c
|
[
"MIT"
] | 1
|
2021-10-02T17:58:16.000Z
|
2021-10-02T17:58:16.000Z
|
tests/test_str.py
|
uit-cosmo/2d_propagating_blobs
|
2c19458a5ba6d0d138461fadf3e935273bee4b5c
|
[
"MIT"
] | 20
|
2021-10-04T10:44:34.000Z
|
2022-01-28T15:20:39.000Z
|
tests/test_str.py
|
uit-cosmo/2d-propagating-blobs
|
2c19458a5ba6d0d138461fadf3e935273bee4b5c
|
[
"MIT"
] | 1
|
2021-12-06T13:31:58.000Z
|
2021-12-06T13:31:58.000Z
|
import pytest
from blobmodel import Model
from blobmodel.geometry import Geometry
def test_blob_shape_exception():
    """An unrecognized blob shape must raise NotImplementedError."""
    params = dict(
        Nx=2, Ny=2, Lx=10, Ly=10, dt=0.5, T=1,
        periodic_y=False, blob_shape="different_shape", num_blobs=1,
    )
    with pytest.raises(NotImplementedError):
        model = Model(**params)
        model.make_realization(speed_up=True, error=0.1)
def test_geometry_str():
    """__str__ of Geometry lists every construction parameter."""
    expected = (
        "Geometry parameters: Nx:1, Ny:1, Lx:1, Ly:1, "
        "dt:1, T:1, y-periodicity:False"
    )
    assert str(Geometry(1, 1, 1, 1, 1, 1, False)) == expected
def test_model_str():
    """__str__ of Model reports blob shape, blob count and drain time."""
    model = Model(Nx=2, Ny=2, Lx=10, Ly=10, dt=0.5, T=1,
                  periodic_y=False, blob_shape="exp", num_blobs=1)
    expected = "2d Blob Model with blob shape:exp, num_blobs:1 and t_drain:10"
    assert str(model) == expected
# Run the checks in order when the file is executed directly as a script.
for _check in (test_blob_shape_exception, test_geometry_str, test_model_str):
    _check()
| 21.0625
| 90
| 0.545994
|
e90b535f739ae9f77823717ce766f80ad9ae27bb
| 6,143
|
py
|
Python
|
week_04/feature_preprocessing.py
|
MrRozum/DeepLearning_Winter22
|
3bbce7315b342036d6c050e82170fa5d4c4b4993
|
[
"MIT"
] | 5
|
2022-02-01T07:25:28.000Z
|
2022-02-02T13:58:34.000Z
|
week_04/feature_preprocessing.py
|
MrRozum/DeepLearning_Winter22
|
3bbce7315b342036d6c050e82170fa5d4c4b4993
|
[
"MIT"
] | 2
|
2021-06-14T21:11:02.000Z
|
2021-06-30T20:03:39.000Z
|
week_04/feature_preprocessing.py
|
MrRozum/DeepLearning_Winter22
|
3bbce7315b342036d6c050e82170fa5d4c4b4993
|
[
"MIT"
] | 8
|
2021-04-07T07:38:20.000Z
|
2021-04-24T06:08:01.000Z
|
import warnings
warnings.filterwarnings("ignore")
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
plt.style.use('ggplot')
import torch
print(torch.__version__)
import torch.nn as nn
import torch.optim as optim
import torch.utils.data as data_utils
from torch.utils.data import DataLoader, Dataset, Sampler
from torch.utils.data.dataloader import default_collate
from torch.utils.tensorboard import SummaryWriter
from pytorch_lightning.metrics import Accuracy
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
# Model and training hyper-parameters.
INPUT_SIZE = 36  # width of the concatenated feature vector (31 numeric + 5 embedding)
HIDDEN_SIZE = 25  # units per hidden layer
OUTPUT_SIZE = 5  # number of outcome classes
LEARNING_RATE = 1e-2
EPOCHS = 400
BATCH_SIZE = 256
EMBEDDING_SIZE = 5  # embedding dimension for the categorical Gender column
class CustomDataset(Dataset):
    """Shelter-animal outcomes dataset.

    Reads features from ./data/X_cat.csv and targets from ./data/y_cat.csv,
    converts the cyclic calendar features (weekday, hour, month) into
    sin/cos pairs, collapses the one-hot sex columns into a single
    categorical ``Gender`` code, and label-encodes the outcome.
    """
    def __init__(self):
        """Load both CSV files and run all preprocessing eagerly."""
        X = pd.read_csv('./data/X_cat.csv', sep='\t', index_col=0)
        target = pd.read_csv('./data/y_cat.csv', sep='\t', index_col=0, names=['status'])  # header=-1,
        weekday_columns = ['Weekday_0', 'Weekday_1', 'Weekday_2',
                           'Weekday_3', 'Weekday_4', 'Weekday_5', 'Weekday_6']
        weekdays = np.argmax(X[weekday_columns].values, axis=1)
        X.drop(weekday_columns, axis=1, inplace=True)
        # Cyclic encoding: the angle must scale with the value,
        # cos(2*pi*v/period).  The original code multiplied the value by the
        # *constant* cos(2*pi/period), which left each sin/cos pair as two
        # collinear rescalings of the raw value instead of a circle embedding.
        X['Weekday_cos'] = np.cos(2 * np.pi * weekdays / 7.)
        X['Weekday_sin'] = np.sin(2 * np.pi * weekdays / 7.)
        X['Hour_cos'] = np.cos(2 * np.pi * X['Hour'].values / 24.)
        X['Hour_sin'] = np.sin(2 * np.pi * X['Hour'].values / 24.)
        X['Month_cos'] = np.cos(2 * np.pi * X['Month'].values / 12.)
        X['Month_sin'] = np.sin(2 * np.pi * X['Month'].values / 12.)
        # Collapse the one-hot sex columns into one categorical code (0..2).
        X['Gender'] = np.argmax(X[['Sex_Female', 'Sex_Male', 'Sex_Unknown']].values, axis=1)
        X.drop(['Sex_Female', 'Sex_Male', 'Sex_Unknown'], axis=1, inplace=True)
        print(X.shape)
        print(X.head())
        target = target.iloc[:, :].values
        # 'Died' outcomes are relabeled as 'Euthanasia' before encoding.
        target[target == 'Died'] = 'Euthanasia'
        le = LabelEncoder()
        self.y = le.fit_transform(target)
        self.X = X.values
        self.columns = X.columns.values
        self.embedding_column = 'Gender'
        self.nrof_emb_categories = 3
        self.numeric_columns = ['IsDog', 'Age', 'HasName', 'NameLength', 'NameFreq', 'MixColor', 'ColorFreqAsIs',
                                'ColorFreqBase', 'TabbyColor', 'MixBreed', 'Domestic', 'Shorthair', 'Longhair',
                                'Year', 'Day', 'Breed_Chihuahua Shorthair Mix', 'Breed_Domestic Medium Hair Mix',
                                'Breed_Domestic Shorthair Mix', 'Breed_German Shepherd Mix', 'Breed_Labrador Retriever Mix',
                                'Breed_Pit Bull Mix', 'Breed_Rare',
                                'SexStatus_Flawed', 'SexStatus_Intact', 'SexStatus_Unknown',
                                'Weekday_cos', 'Weekday_sin', 'Hour_cos', 'Hour_sin',
                                'Month_cos', 'Month_sin']

    def __len__(self):
        return len(self.X)

    def __getitem__(self, idx):
        """Return one observation as a {column: tensor} dict plus its label."""
        row = self.X[idx, :]
        row = {col: torch.tensor(row[i]) for i, col in enumerate(self.columns)}
        return row, self.y[idx]
class MLPNet(nn.Module):
    """MLP classifier over numeric features plus one embedded categorical column.

    The forward input is a dict mapping column name -> 1-D batch tensor
    (as produced by the DataLoader's default collation of CustomDataset rows).
    """

    def __init__(self, input_size, hidden_size, output_size, nrof_cat, emb_dim,
                 emb_columns, numeric_columns):
        """input_size must equal len(numeric_columns) + emb_dim.

        emb_columns is the (single) categorical column name; nrof_cat is its
        number of categories; emb_dim the embedding width.
        """
        super(MLPNet, self).__init__()
        self.emb_columns = emb_columns
        self.numeric_columns = numeric_columns
        self.emb_layer = torch.nn.Embedding(nrof_cat, emb_dim)
        self.feature_bn = torch.nn.BatchNorm1d(input_size)
        self.linear1 = torch.nn.Linear(input_size, hidden_size)
        self.linear1.apply(self.init_weights)
        self.bn1 = torch.nn.BatchNorm1d(hidden_size)
        self.linear2 = torch.nn.Linear(hidden_size, hidden_size)
        self.linear2.apply(self.init_weights)
        self.bn2 = torch.nn.BatchNorm1d(hidden_size)
        self.linear3 = torch.nn.Linear(hidden_size, output_size)

    def init_weights(self, m):
        # Xavier/Glorot init for linear layers.  xavier_uniform_ is the
        # in-place API; the underscore-less `xavier_uniform` is a deprecated
        # alias of the same function.
        if isinstance(m, nn.Linear):
            torch.nn.init.xavier_uniform_(m.weight)
            # m.bias.data.fill_(0.001)

    def forward(self, x):
        # as_tensor avoids the copy (and warning) torch.tensor() makes when
        # the batch values are already tensors.
        emb_output = self.emb_layer(torch.as_tensor(x[self.emb_columns], dtype=torch.int64))
        # Stack the numeric columns directly instead of round-tripping the
        # batch through a pandas DataFrame on every forward pass; the result
        # is the same (batch, n_numeric) float32 matrix in column order.
        numeric_feats = torch.stack(
            [torch.as_tensor(x[col], dtype=torch.float32) for col in self.numeric_columns],
            dim=1)
        concat_input = torch.cat([numeric_feats, emb_output], dim=1)
        output = self.feature_bn(concat_input)
        output = torch.relu(self.bn1(self.linear1(output)))
        output = torch.relu(self.bn2(self.linear2(output)))
        output = self.linear3(output)
        # Class probabilities (note: CrossEntropyLoss on softmax output is
        # how the surrounding script uses this model; kept as-is).
        return torch.softmax(output, dim=1)
def run_train(model, train_loader):
    """Train *model* for EPOCHS passes over *train_loader*.

    Uses the module-level `optimizer`, `criterion` and `accuracy` objects;
    returns the total number of optimizer steps taken.
    """
    step = 0
    for epoch in range(EPOCHS):
        model.train()
        for batch_features, batch_labels in train_loader:
            optimizer.zero_grad()  # reset gradients
            predictions = model(batch_features)
            # Compute the error and backpropagate
            loss = criterion(predictions, batch_labels)
            loss.backward()
            batch_acc = accuracy(predictions, batch_labels).item()
            optimizer.step()  # apply the gradient update
            step += 1
            if step % 100 == 0:
                print('EPOCH %d STEP %d : train_loss: %f train_acc: %f' %
                      (epoch, step, loss.item(), batch_acc))
    return step
# Script entry: build the dataset/loader, assemble the model, and train.
animal_dataset = CustomDataset()
train_loader = data_utils.DataLoader(dataset=animal_dataset,
                                     batch_size=BATCH_SIZE, shuffle=True)
# The dataset object supplies the embedding configuration discovered
# during preprocessing (column name, category count, numeric columns).
model = MLPNet(INPUT_SIZE, HIDDEN_SIZE, OUTPUT_SIZE, animal_dataset.nrof_emb_categories,
               EMBEDDING_SIZE,
               animal_dataset.embedding_column, animal_dataset.numeric_columns)
criterion = nn.CrossEntropyLoss()
accuracy = Accuracy()
optimizer = optim.Adam(model.parameters(), lr=LEARNING_RATE)
step = run_train(model, train_loader)
| 32.850267
| 124
| 0.621846
|
46519bdc3adc16e6473b990b3ceab058328351f5
| 2,107
|
py
|
Python
|
indico/web/forms/fields/protection.py
|
bpedersen2/indico
|
8410ee5f8f8530a8692f3dd2d4015c3074b0aa30
|
[
"MIT"
] | 1
|
2021-02-24T10:20:14.000Z
|
2021-02-24T10:20:14.000Z
|
indico/web/forms/fields/protection.py
|
bpedersen2/indico
|
8410ee5f8f8530a8692f3dd2d4015c3074b0aa30
|
[
"MIT"
] | 5
|
2021-04-08T19:26:47.000Z
|
2022-01-24T16:30:18.000Z
|
indico/web/forms/fields/protection.py
|
bpedersen2/indico
|
8410ee5f8f8530a8692f3dd2d4015c3074b0aa30
|
[
"MIT"
] | 2
|
2019-02-24T17:29:10.000Z
|
2021-04-08T19:23:27.000Z
|
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from flask import render_template
from markupsafe import Markup
from indico.core.db import db
from indico.core.db.sqlalchemy.protection import ProtectionMode
from indico.util.i18n import _
from indico.web.forms.fields import IndicoEnumRadioField
from indico.web.forms.widgets import JinjaWidget
class IndicoProtectionField(IndicoEnumRadioField):
    """Radio field for choosing an object's protection mode.

    Hides the "inheriting" option when the protected object has no
    protection parent to inherit from.
    """
    widget = JinjaWidget('forms/protection_widget.html', single_kwargs=True)
    radio_widget = JinjaWidget('forms/radio_buttons_widget.html', orientation='horizontal', single_kwargs=True)
    def __init__(self, *args, **kwargs):
        # `protected_object` and `acl_message_url` arrive as callables taking
        # the form, so the actual values are resolved at field-construction time.
        self.protected_object = kwargs.pop('protected_object')(kwargs['_form'])
        get_acl_message_url = kwargs.pop('acl_message_url', None)
        self.acl_message_url = get_acl_message_url(kwargs['_form']) if get_acl_message_url else None
        self.can_inherit_protection = self.protected_object.protection_parent is not None
        if not self.can_inherit_protection:
            # No parent to inherit from: drop the "inheriting" choice.
            kwargs['skip'] = {ProtectionMode.inheriting}
        super().__init__(*args, enum=ProtectionMode, **kwargs)
    def render_protection_message(self):
        """Render the HTML snippet describing the current protection state."""
        protected_object = self.get_form().protected_object
        if hasattr(protected_object, 'get_non_inheriting_objects'):
            non_inheriting_objects = protected_object.get_non_inheriting_objects()
        else:
            non_inheriting_objects = []
        # Label the parent for display; anything that is neither an Event
        # nor a Category is presented as a Session.
        if isinstance(protected_object.protection_parent, db.m.Event):
            parent_type = _('Event')
        elif isinstance(protected_object.protection_parent, db.m.Category):
            parent_type = _('Category')
        else:
            parent_type = _('Session')
        rv = render_template('_protection_info.html', field=self, protected_object=protected_object,
                             parent_type=parent_type, non_inheriting_objects=non_inheriting_objects)
        return Markup(rv)
| 45.804348
| 111
| 0.728524
|
0b6073a7cd51dad23173cb33a42118d333820dbb
| 7,698
|
py
|
Python
|
tests/python/Lut1DTransformTest.py
|
Shrinks99/OpenColorIO
|
94ca1fc2f0c0eae3a8678d7fe3c98cfef70f5545
|
[
"BSD-3-Clause"
] | null | null | null |
tests/python/Lut1DTransformTest.py
|
Shrinks99/OpenColorIO
|
94ca1fc2f0c0eae3a8678d7fe3c98cfef70f5545
|
[
"BSD-3-Clause"
] | null | null | null |
tests/python/Lut1DTransformTest.py
|
Shrinks99/OpenColorIO
|
94ca1fc2f0c0eae3a8678d7fe3c98cfef70f5545
|
[
"BSD-3-Clause"
] | null | null | null |
# SPDX-License-Identifier: BSD-3-Clause
# Copyright Contributors to the OpenColorIO Project.
import logging
import unittest
logger = logging.getLogger(__name__)
try:
import numpy as np
except ImportError:
logger.warning(
"NumPy could not be imported. "
"Test case will lack significant coverage!"
)
np = None
import PyOpenColorIO as OCIO
class Lut1DTransformTest(unittest.TestCase):
    """Unit tests for the OCIO.Lut1DTransform Python bindings.

    NumPy-dependent portions are skipped when numpy is unavailable
    (see the import guard at the top of the file).
    """
    def test_default_constructor(self):
        """
        Test the default constructor.
        """
        lut = OCIO.Lut1DTransform()
        self.assertEqual(lut.getLength(), 2)
        self.assertEqual(lut.getDirection(), OCIO.TRANSFORM_DIR_FORWARD)
        self.assertEqual(lut.getHueAdjust(), OCIO.HUE_NONE)
        self.assertFalse(lut.getInputHalfDomain())
        self.assertFalse(lut.getOutputRawHalfs())
        self.assertEqual(lut.getInterpolation(), OCIO.INTERP_DEFAULT)
        self.assertEqual(lut.getFileOutputBitDepth(), OCIO.BIT_DEPTH_UNKNOWN)
        # A default 2-entry LUT is the identity: endpoints (0,0,0) and (1,1,1).
        r, g, b = lut.getValue(0)
        self.assertEqual([r, g, b], [0, 0, 0])
        r, g, b = lut.getValue(1)
        self.assertEqual([r, g, b], [1, 1, 1])
    def test_direction(self):
        """
        Test the setDirection() and getDirection() methods.
        """
        lut = OCIO.Lut1DTransform()
        for direction in OCIO.TransformDirection.__members__.values():
            lut.setDirection(direction)
            self.assertEqual(lut.getDirection(), direction)
        # Wrong type tests.
        for invalid in (None, 1, 'test'):
            with self.assertRaises(TypeError):
                lut.setDirection(invalid)
    def test_format_metadata(self):
        """
        Test the getFormatMetadata() method.
        """
        lut = OCIO.Lut1DTransform()
        format_metadata = lut.getFormatMetadata()
        self.assertIsInstance(format_metadata, OCIO.FormatMetadata)
        self.assertEqual(format_metadata.getElementName(), 'ROOT')
        self.assertEqual(format_metadata.getName(), '')
        self.assertEqual(format_metadata.getID(), '')
        format_metadata.setName('name')
        format_metadata.setID('id')
        self.assertEqual(format_metadata.getName(), 'name')
        self.assertEqual(format_metadata.getID(), 'id')
    def test_file_output_bit_depth(self):
        """
        Test get/setFileOutputBitDepth.
        """
        lut = OCIO.Lut1DTransform()
        self.assertEqual(lut.getFileOutputBitDepth(), OCIO.BIT_DEPTH_UNKNOWN)
        lut.setFileOutputBitDepth(OCIO.BIT_DEPTH_UINT10)
        self.assertEqual(lut.getFileOutputBitDepth(), OCIO.BIT_DEPTH_UINT10)
    def test_hue_adjust(self):
        """
        Test get/setHueAdjust.
        """
        lut = OCIO.Lut1DTransform()
        self.assertEqual(lut.getHueAdjust(), OCIO.HUE_NONE)
        lut.setHueAdjust(OCIO.HUE_DW3)
        self.assertEqual(lut.getHueAdjust(), OCIO.HUE_DW3)
        # HUE_WYPN is rejected for Lut1D transforms.
        with self.assertRaises(OCIO.Exception):
            lut.setHueAdjust(OCIO.HUE_WYPN)
    def test_input_half_domain(self):
        """
        Test get/getInputHalfDomain.
        """
        lut = OCIO.Lut1DTransform()
        self.assertFalse(lut.getInputHalfDomain())
        lut.setInputHalfDomain(True)
        self.assertTrue(lut.getInputHalfDomain())
    def test_output_raw_halfs(self):
        """
        Test get/setOutputRawHalfs.
        """
        lut = OCIO.Lut1DTransform()
        self.assertFalse(lut.getOutputRawHalfs())
        lut.setOutputRawHalfs(True)
        self.assertTrue(lut.getOutputRawHalfs())
    def test_length(self):
        """
        Test get/setLength.
        """
        lut = OCIO.Lut1DTransform()
        self.assertEqual(lut.getLength(), 2)
        lut.setValue(0, 0.1, 0.2, 0.3)
        lut.setLength(3)
        self.assertEqual(lut.getLength(), 3)
        # Changing the length reset LUT values to identity.
        r, g, b = lut.getValue(0)
        self.assertEqual([r, g, b], [0, 0, 0])
    def test_constructor_with_keywords(self):
        """
        Test Lut1DTransform constructor with keywords and validate its values.
        """
        lut = OCIO.Lut1DTransform(
            length=65536,
            inputHalfDomain=True,
            outputRawHalfs=True,
            fileOutputBitDepth=OCIO.BIT_DEPTH_UINT10,
            hueAdjust=OCIO.HUE_DW3,
            interpolation=OCIO.INTERP_BEST,
            direction=OCIO.TRANSFORM_DIR_INVERSE)
        self.assertEqual(lut.getLength(), 65536)
        self.assertEqual(lut.getDirection(), OCIO.TRANSFORM_DIR_INVERSE)
        self.assertEqual(lut.getHueAdjust(), OCIO.HUE_DW3)
        self.assertTrue(lut.getInputHalfDomain())
        self.assertTrue(lut.getOutputRawHalfs())
        self.assertEqual(lut.getInterpolation(), OCIO.INTERP_BEST)
        self.assertEqual(lut.getFileOutputBitDepth(), OCIO.BIT_DEPTH_UINT10)
        # Keywords may also be given in any subset; unspecified ones default.
        lut = OCIO.Lut1DTransform(
            length=4,
            direction=OCIO.TRANSFORM_DIR_INVERSE)
        self.assertEqual(lut.getLength(), 4)
        self.assertEqual(lut.getDirection(), OCIO.TRANSFORM_DIR_INVERSE)
        self.assertEqual(lut.getHueAdjust(), OCIO.HUE_NONE)
        self.assertFalse(lut.getInputHalfDomain())
        self.assertFalse(lut.getOutputRawHalfs())
        self.assertEqual(lut.getInterpolation(), OCIO.INTERP_DEFAULT)
        self.assertEqual(lut.getFileOutputBitDepth(), OCIO.BIT_DEPTH_UNKNOWN)
    def test_constructor_with_positional(self):
        """
        Test Lut1DTransform constructor without keywords and validate its values.
        """
        lut = OCIO.Lut1DTransform(65536, True, True, OCIO.BIT_DEPTH_UINT10,
                                  OCIO.HUE_DW3, OCIO.INTERP_BEST,
                                  OCIO.TRANSFORM_DIR_INVERSE)
        self.assertEqual(lut.getLength(), 65536)
        self.assertEqual(lut.getDirection(), OCIO.TRANSFORM_DIR_INVERSE)
        self.assertEqual(lut.getHueAdjust(), OCIO.HUE_DW3)
        self.assertTrue(lut.getInputHalfDomain())
        self.assertTrue(lut.getOutputRawHalfs())
        self.assertEqual(lut.getInterpolation(), OCIO.INTERP_BEST)
        self.assertEqual(lut.getFileOutputBitDepth(), OCIO.BIT_DEPTH_UINT10)
    def test_array(self):
        """
        Get & set Lut array values.
        """
        lut = OCIO.Lut1DTransform(length=3)
        # A fresh 3-entry LUT is the identity ramp 0, 0.5, 1.
        r, g, b = lut.getValue(0)
        self.assertEqual([r, g, b], [0, 0, 0])
        r, g, b = lut.getValue(1)
        self.assertEqual([r, g, b], [0.5, 0.5, 0.5])
        r, g, b = lut.getValue(2)
        self.assertEqual([r, g, b], [1, 1, 1])
        lut.setValue(0, 0.1, 0.2, 0.3)
        r, g, b = lut.getValue(0)
        # Values are stored as float.
        self.assertAlmostEqual(r, 0.1, delta=1e-6)
        self.assertAlmostEqual(g, 0.2, delta=1e-6)
        self.assertAlmostEqual(b, 0.3, delta=1e-6)
        if not np:
            logger.warning("NumPy not found. Skipping part of test!")
            return
        # getData/setData expose the LUT as a flat RGB float32 array.
        data = lut.getData()
        expected = np.array([0.1, 0.2, 0.3,
                             0.5, 0.5, 0.5,
                             1., 1., 1.]).astype(np.float32)
        self.assertEqual(data.all(), expected.all())
        data[6] = 0.9
        data[7] = 1.1
        data[8] = 1.2
        lut.setData(data)
        r, g, b = lut.getValue(2)
        self.assertAlmostEqual(r, 0.9, delta=1e-6)
        self.assertAlmostEqual(g, 1.1, delta=1e-6)
        self.assertAlmostEqual(b, 1.2, delta=1e-6)
    def test_equals(self):
        """
        Test equals.
        """
        lut = OCIO.Lut1DTransform()
        lut2 = OCIO.Lut1DTransform()
        self.assertTrue(lut.equals(lut2))
        lut.setValue(0, 0.1, 0.2, 0.3)
        self.assertFalse(lut.equals(lut2))
| 35.474654
| 81
| 0.615095
|
82fe853041bb9d462b91a5fbe0e8660c732514db
| 8,513
|
py
|
Python
|
configs/cascade_rcnn_x101_32x4d_fpn_1x.py
|
eryuehouniao/mmdetection
|
e80df144aeb2000116f1a8deb98fa4916b1fe5c3
|
[
"Apache-2.0"
] | 1
|
2019-10-29T06:45:12.000Z
|
2019-10-29T06:45:12.000Z
|
configs/cascade_rcnn_x101_32x4d_fpn_1x.py
|
eryuehouniao/mmdetection
|
e80df144aeb2000116f1a8deb98fa4916b1fe5c3
|
[
"Apache-2.0"
] | null | null | null |
configs/cascade_rcnn_x101_32x4d_fpn_1x.py
|
eryuehouniao/mmdetection
|
e80df144aeb2000116f1a8deb98fa4916b1fe5c3
|
[
"Apache-2.0"
] | 1
|
2020-09-24T12:17:55.000Z
|
2020-09-24T12:17:55.000Z
|
# model settings
model = dict(
type='CascadeRCNN',
num_stages=3,
# pretrained='open-mmlab://resnext101_32x4d',
pretrained=None,
backbone=dict(
type='ResNeXt',
depth=101,
groups=32,
base_width=4,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
style='pytorch',
# dcn=dict(
# modulated=False,
# groups=32,
# deformable_groups=1,
# fallback_on_stride=False),
# stage_with_dcn=(False, True, True, True),
),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
num_outs=5),
rpn_head=dict(
type='RPNHead',
in_channels=256,
feat_channels=256,
anchor_scales=[8],
anchor_ratios=[0.02, 0.05, 0.1, 0.5, 1.0, 2.0, 10.0, 20.0, 50.0],
anchor_strides=[4, 8, 16, 32, 64],
target_means=[.0, .0, .0, .0],
target_stds=[1.0, 1.0, 1.0, 1.0],
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)),
bbox_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', out_size=7, sample_num=2),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
bbox_head=[
dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=16,
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2],
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)),
dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=16,
target_means=[0., 0., 0., 0.],
target_stds=[0.05, 0.05, 0.1, 0.1],
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)),
dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=16,
target_means=[0., 0., 0., 0.],
target_stds=[0.033, 0.033, 0.067, 0.067],
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))
])
# model training and testing settings
train_cfg = dict(
rpn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.3,
min_pos_iou=0.3,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=256,
pos_fraction=0.5,
neg_pos_ub=-1,
add_gt_as_proposals=False),
allowed_border=0,
pos_weight=-1,
debug=False),
rpn_proposal=dict(
nms_across_levels=False,
nms_pre=2000,
nms_post=2000,
max_num=2000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=[
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
ignore_iof_thr=-1),
sampler=dict(
type='OHEMSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.6,
neg_iou_thr=0.6,
min_pos_iou=0.6,
ignore_iof_thr=-1),
sampler=dict(
type='OHEMSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.7,
min_pos_iou=0.7,
ignore_iof_thr=-1),
sampler=dict(
type='OHEMSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False)
],
stage_loss_weights=[1, 0.5, 0.25])
test_cfg = dict(
rpn=dict(
nms_across_levels=False,
nms_pre=1000,
nms_post=1000,
max_num=1000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=dict(
score_thr=0.05, nms=dict(type='nms', iou_thr=0.5), max_per_img=100),
keep_all_stages=False)
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True),
dict(type='Resize', img_scale=(2048, 905), keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1228, 614),
flip=True,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
imgs_per_gpu=1,
workers_per_gpu=2,
train=dict(
type=dataset_type,
ann_file=["/data1/bupi_data/round2/sparse_train_2coco_padding_1.json",
"/data1/bupi_data/round2/val_coco.json",
"/data1/bupi_data/round2/crop_val_image/after_slice_coco.json",
"/data1/bupi_data/round2/dense_crop_train_image/crop_dense_train_coco_fixbox.json",
"/data1/bupi_data/round2/sparse_crop_train_image/after_slice_coco.json"
],
img_prefix=["/data1/bupi_data/round2/sparse_trian_2coo_padding/",
"/data1/bupi_data/round2/val/",
"/data1/bupi_data/round2/crop_val_image/defect_image/",
"/data1/bupi_data/round2/dense_crop_train_image/defect/",
"/data1/bupi_data/round2/sparse_crop_train_image/defect_image/",
],
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline))
# optimizer
# SGD with momentum; gradients are clipped to an L2 norm of 35.
optimizer = dict(type='SGD', lr=0.0025, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy
# Step schedule with a linear warmup over the first 500 iterations;
# the LR drops at epochs 8 and 11.
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=500,
    warmup_ratio=1.0 / 3,
    step=[8, 11])
checkpoint_config = dict(interval=1)  # save a checkpoint every epoch
# yapf:disable
log_config = dict(
    interval=50,
    hooks=[
        dict(type='TextLoggerHook'),
        # dict(type='TensorboardLoggerHook')
    ])
# yapf:enable
# runtime settings
total_epochs = 12
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = '/data1/lgj/bupi/round2/work_dirs/resnext101_data_aug/'
# NOTE(review): load_from appears to seed the weights from a previous run's
# epoch-12 checkpoint (vs. resume_from, which is None) — confirm intent.
load_from = "/data1/lgj/bupi/round2/pretrained_model/epoch_12.pth"
# load_from = None
resume_from = None
workflow = [('train', 1)]
gpus_id = '0,1'
gpus_num = 2
| 32.996124
| 101
| 0.553389
|
7ffbf572ea3d56d9a205e9e1174bb29a3ebae148
| 11,638
|
py
|
Python
|
tensorqtl/tensorqtl.py
|
susie-song/tensorqtl
|
97d6f26eae9c2d8624214c4e15b52c528e823001
|
[
"BSD-3-Clause"
] | null | null | null |
tensorqtl/tensorqtl.py
|
susie-song/tensorqtl
|
97d6f26eae9c2d8624214c4e15b52c528e823001
|
[
"BSD-3-Clause"
] | null | null | null |
tensorqtl/tensorqtl.py
|
susie-song/tensorqtl
|
97d6f26eae9c2d8624214c4e15b52c528e823001
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
from __future__ import print_function
import pandas as pd
import numpy as np
from datetime import datetime
import sys
import os
import argparse
sys.path.insert(1, os.path.dirname(__file__))
from core import *
from post import *
import genotypeio
import cis
import trans
def main():
    """CLI entry point: parse arguments, load genotype/phenotype/covariate data,
    and dispatch to cis, cis_nominal, cis_independent, or trans QTL mapping."""
    parser = argparse.ArgumentParser(description='tensorQTL: GPU-based QTL mapper')
    parser.add_argument('genotype_path', help='Genotypes in PLINK format')
    parser.add_argument('phenotype_bed', help='Phenotypes in BED format')
    parser.add_argument('prefix', help='Prefix for output file names')
    parser.add_argument('--mode', default='cis', choices=['cis', 'cis_nominal', 'cis_independent', 'trans'], help='Mapping mode. Default: cis')
    parser.add_argument('--covariates', default=None, help='Covariates file, tab-delimited, covariates x samples')
    parser.add_argument('--permutations', type=int, default=10000, help='Number of permutations. Default: 10000')
    parser.add_argument('--interaction', default=None, type=str, help='Interaction term(s)')
    parser.add_argument('--cis_output', default=None, type=str, help="Output from 'cis' mode with q-values. Required for independent cis-QTL mapping.")
    parser.add_argument('--phenotype_groups', default=None, type=str, help='Phenotype groups. Header-less TSV with two columns: phenotype_id, group_id')
    parser.add_argument('--window', default=1000000, type=np.int32, help='Cis-window size, in bases. Default: 1000000.')
    parser.add_argument('--pval_threshold', default=None, type=np.float64, help='Output only significant phenotype-variant pairs with a p-value below threshold. Default: 1e-5 for trans-QTL')
    parser.add_argument('--maf_threshold', default=0, type=np.float64, help='Include only genotypes with minor allele frequency >= maf_threshold. Default: 0')
    parser.add_argument('--maf_threshold_interaction', default=0.05, type=np.float64, help='MAF threshold for interactions, applied to lower and upper half of samples')
    parser.add_argument('--return_dense', action='store_true', help='Return dense output for trans-QTL.')
    parser.add_argument('--return_r2', action='store_true', help='Return r2 (only for sparse trans-QTL output)')
    parser.add_argument('--best_only', action='store_true', help='Only write lead association for each phenotype (interaction mode only)')
    parser.add_argument('--output_text', action='store_true', help='Write output in txt.gz format instead of parquet (trans-QTL mode only)')
    parser.add_argument('--batch_size', type=int, default=20000, help='Batch size. Reduce this if encountering OOM errors.')
    parser.add_argument('--load_split', action='store_true', help='Load genotypes into memory separately for each chromosome.')
    parser.add_argument('--fdr', default=0.05, type=np.float64, help='FDR for cis-QTLs')
    parser.add_argument('--qvalue_lambda', default=None, type=np.float64, help='lambda parameter for pi0est in qvalue.')
    parser.add_argument('--seed', default=None, type=int, help='Seed for permutations.')
    parser.add_argument('-o', '--output_dir', default='.', help='Output directory')
    args = parser.parse_args()
    # check inputs
    if args.mode == 'cis_independent' and (args.cis_output is None or not os.path.exists(args.cis_output)):
        raise ValueError("Output from 'cis' mode must be provided.")
    if args.interaction is not None and args.mode not in ['cis_nominal', 'trans']:
        raise ValueError("Interactions are only supported in 'cis_nominal' or 'trans' mode.")
    logger = SimpleLogger(os.path.join(args.output_dir, f'{args.prefix}.tensorQTL.{args.mode}.log'))
    logger.write(f'[{datetime.now().strftime("%b %d %H:%M:%S")}] Running TensorQTL: {args.mode.split("_")[0]}-QTL mapping')
    if torch.cuda.is_available():
        logger.write(f' * using GPU ({torch.cuda.get_device_name(torch.cuda.current_device())})')
    else:
        logger.write(' * WARNING: using CPU!')
    # NOTE(review): `device` is assigned but not used in this function.
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    if args.seed is not None:
        logger.write(f' * using seed {args.seed}')
    # load inputs
    logger.write(f' * reading phenotypes ({args.phenotype_bed})')
    phenotype_df, phenotype_pos_df = read_phenotype_bed(args.phenotype_bed)
    # phenotype_id -> position record; used later for cis-filtering in trans mode
    tss_dict = phenotype_pos_df.T.to_dict()
    if args.covariates is not None:
        logger.write(f' * reading covariates ({args.covariates})')
        covariates_df = pd.read_csv(args.covariates, sep='\t', index_col=0).T
        # sample order of phenotypes and covariates must match exactly
        assert phenotype_df.columns.equals(covariates_df.index)
    if args.interaction is not None:
        logger.write(f' * reading interaction term(s) ({args.interaction})')
        # allow headerless input for single interactions
        with open(args.interaction) as f:
            f.readline()
            s = f.readline().strip()
        if len(s.split('\t')) == 2:  # index + value
            interaction_df = pd.read_csv(args.interaction, sep='\t', index_col=0, header=None)
        else:
            interaction_df = pd.read_csv(args.interaction, sep='\t', index_col=0)
        # select samples
        assert covariates_df.index.isin(interaction_df.index).all()
        interaction_df = interaction_df.loc[covariates_df.index].astype(np.float32)
    else:
        interaction_df = None
    # NOTE(review): --maf_threshold defaults to 0 (not None), so this branch is
    # dead and the trans-specific 0.05 default is never applied — confirm the
    # intended argparse default before relying on trans-mode MAF filtering.
    if args.maf_threshold is None:
        if args.mode == 'trans':
            maf_threshold = 0.05
        else:
            maf_threshold = 0
    else:
        maf_threshold = args.maf_threshold
    if args.phenotype_groups is not None:
        group_s = pd.read_csv(args.phenotype_groups, sep='\t', index_col=0, header=None).squeeze('columns')
        # verify sort order
        group_dict = group_s.to_dict()
        previous_group = ''
        parsed_groups = 0
        for i in phenotype_df.index:
            if group_dict[i]!=previous_group:
                parsed_groups += 1
                previous_group = group_dict[i]
        if not parsed_groups == len(group_s.unique()):
            raise ValueError('Groups defined in input do not match phenotype file (check sort order).')
    else:
        group_s = None
    # load genotypes
    pr = genotypeio.PlinkReader(args.genotype_path, select_samples=phenotype_df.columns, dtype=np.int8)
    variant_df = pr.bim.set_index('snp')[['chrom', 'pos']]
    if args.mode != 'cis_nominal' or not args.load_split:  # load all genotypes into memory
        genotype_df = pd.DataFrame(pr.load_genotypes(), index=pr.bim['snp'], columns=pr.fam['iid'])
    if args.mode.startswith('cis'):
        if args.mode == 'cis':
            res_df = cis.map_cis(genotype_df, variant_df, phenotype_df, phenotype_pos_df, covariates_df,
                                 group_s=group_s, nperm=args.permutations, window=args.window,
                                 maf_threshold=maf_threshold, logger=logger, seed=args.seed, verbose=True)
            logger.write(' * writing output')
            # q-values require rpy2/qvalue; skipped silently when unavailable
            if has_rpy2:
                calculate_qvalues(res_df, fdr=args.fdr, qvalue_lambda=args.qvalue_lambda, logger=logger)
            out_file = os.path.join(args.output_dir, args.prefix+'.cis_qtl.txt.gz')
            res_df.to_csv(out_file, sep='\t', float_format='%.6g')
        elif args.mode == 'cis_nominal':
            if not args.load_split:
                cis.map_nominal(genotype_df, variant_df, phenotype_df, phenotype_pos_df, args.prefix, covariates_df=covariates_df,
                                interaction_df=interaction_df, maf_threshold_interaction=args.maf_threshold_interaction,
                                group_s=None, window=args.window, maf_threshold=maf_threshold, run_eigenmt=True,
                                output_dir=args.output_dir, write_top=True, write_stats=not args.best_only, logger=logger, verbose=True)
            else:  # load genotypes for each chromosome separately
                top_df = []
                for chrom in pr.chrs:
                    g, pos_s = pr.get_region(chrom)
                    genotype_df = pd.DataFrame(g, index=pos_s.index, columns=pr.fam['iid'])[phenotype_df.columns]
                    variant_df = pr.bim.set_index('snp')[['chrom', 'pos']]
                    chr_df = cis.map_nominal(genotype_df, variant_df[variant_df['chrom'] == chrom],
                                             phenotype_df[phenotype_pos_df['chr'] == chrom], phenotype_pos_df[phenotype_pos_df['chr'] == chrom],
                                             args.prefix, covariates_df=covariates_df,
                                             interaction_df=interaction_df, maf_threshold_interaction=args.maf_threshold_interaction,
                                             group_s=None, window=args.window, maf_threshold=maf_threshold, run_eigenmt=True,
                                             output_dir=args.output_dir, write_top=True, write_stats=not args.best_only, logger=logger, verbose=True)
                    top_df.append(chr_df)
                # per-phenotype top associations are only collected in interaction mode
                if interaction_df is not None:
                    top_df = pd.concat(top_df)
                    top_df.to_csv(os.path.join(args.output_dir, f'{args.prefix}.cis_qtl_top_assoc.txt.gz'),
                                  sep='\t', float_format='%.6g')
        elif args.mode == 'cis_independent':
            summary_df = pd.read_csv(args.cis_output, sep='\t', index_col=0)
            # accept legacy column names from older cis outputs
            summary_df.rename(columns={'minor_allele_samples':'ma_samples', 'minor_allele_count':'ma_count'}, inplace=True)
            res_df = cis.map_independent(genotype_df, variant_df, summary_df, phenotype_df, phenotype_pos_df, covariates_df,
                                         group_s=group_s, fdr=args.fdr, nperm=args.permutations, window=args.window,
                                         maf_threshold=maf_threshold, logger=logger, seed=args.seed, verbose=True)
            logger.write(' * writing output')
            out_file = os.path.join(args.output_dir, args.prefix+'.cis_independent_qtl.txt.gz')
            res_df.to_csv(out_file, sep='\t', index=False, float_format='%.6g')
    elif args.mode == 'trans':
        return_sparse = not args.return_dense
        pval_threshold = args.pval_threshold
        if pval_threshold is None and return_sparse:
            pval_threshold = 1e-5
            logger.write(f' * p-value threshold: {pval_threshold:.2g}')
        if interaction_df is not None:
            if interaction_df.shape[1] > 1:
                raise NotImplementedError('trans-QTL mapping currently only supports a single interaction.')
            else:
                interaction_df = interaction_df.squeeze('columns')
        pairs_df = trans.map_trans(genotype_df, phenotype_df, covariates_df, interaction_s=interaction_df,
                                   return_sparse=return_sparse, pval_threshold=pval_threshold,
                                   maf_threshold=maf_threshold, batch_size=args.batch_size,
                                   return_r2=args.return_r2, logger=logger)
        logger.write(' * filtering out cis-QTLs (within +/-5Mb)')
        pairs_df = trans.filter_cis(pairs_df, tss_dict, variant_df, window=5000000)
        logger.write(' * writing output')
        if not args.output_text:
            pairs_df.to_parquet(os.path.join(args.output_dir, args.prefix+'.trans_qtl_pairs.parquet'))
        else:
            out_file = os.path.join(args.output_dir, args.prefix+'.trans_qtl_pairs.txt.gz')
            pairs_df.to_csv(out_file, sep='\t', index=False, float_format='%.6g')
    logger.write(f'[{datetime.now().strftime("%b %d %H:%M:%S")}] Finished mapping')
# Script entry point.
if __name__ == '__main__':
    main()
| 60.931937
| 190
| 0.653892
|
e115b84c1b43f585764d4356ab8fd6427bb85efd
| 292
|
py
|
Python
|
docs/build/docutils/test/functional/tests/math_output_latex.py
|
mjtamlyn/django-braces
|
8adc9bc4f5139e3d032d4e38657bf86413388b78
|
[
"BSD-3-Clause"
] | 1
|
2015-03-22T16:49:07.000Z
|
2015-03-22T16:49:07.000Z
|
docs/build/docutils/test/functional/tests/math_output_latex.py
|
mjtamlyn/django-braces
|
8adc9bc4f5139e3d032d4e38657bf86413388b78
|
[
"BSD-3-Clause"
] | null | null | null |
docs/build/docutils/test/functional/tests/math_output_latex.py
|
mjtamlyn/django-braces
|
8adc9bc4f5139e3d032d4e38657bf86413388b78
|
[
"BSD-3-Clause"
] | null | null | null |
# Functional-test configuration: render data/math.txt with math_output=latex.
# Source and destination file names.
test_source = "data/math.txt"
test_destination = "math_output_latex.html"
# Keyword parameters passed to publish_file.
reader_name = "standalone"
parser_name = "rst"
writer_name = "html"
# Extra setting: emit math as raw LaTeX instead of converting it.
# NOTE(review): `settings_overrides` is not defined in this file — presumably
# injected by the docutils functional-test harness before exec; verify.
settings_overrides['math_output'] = 'latex'
| 20.857143
| 44
| 0.756849
|
5629458ddf65146eec3a0907db863041c0f2409c
| 2,775
|
py
|
Python
|
DQM/TrackingMonitor/python/TrackEfficiencyMonitor_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
DQM/TrackingMonitor/python/TrackEfficiencyMonitor_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
DQM/TrackingMonitor/python/TrackEfficiencyMonitor_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
# The following comments couldn't be translated into the new config version:
# All/OuterSurface/InnerSurface/ImpactPoint/default(track)
#
import FWCore.ParameterSet.Config as cms

from DQMServices.Core.DQMEDAnalyzer import DQMEDAnalyzer
# DQM analyzer that measures tracking efficiency by matching tracker tracks
# against standalone cosmic muons inside a cylinder of radius `theRadius`
# and half-length `theMaxZ`.
TrackEffMon = DQMEDAnalyzer('TrackEfficiencyMonitor',
    # matching cylinder (cm)
    theRadius = cms.double(85.0),
    theMaxZ = cms.double(110.0),
    isBFieldOff = cms.bool(False),
    # input collections: tracker tracks and standalone muon tracks
    TKTrackCollection = cms.InputTag("rsWithMaterialTracksP5"),
    STATrackCollection = cms.InputTag("cosmicMuons"),
    trackEfficiency = cms.bool(True),
    OutputMEsInRootFile = cms.bool(False),
    OutputFileName = cms.string('MonitorTrackEfficiency.root'),
    FolderName = cms.string('Track/Efficiencies'),
    AlgoName = cms.string('CTF'),
    muoncoll = cms.InputTag('muons'),
    # histogram binning for the standalone-muon variables
    muonXBin = cms.int32(50),
    muonXMin = cms.double(-100),
    muonXMax = cms.double(100),
    muonYBin = cms.int32(50),
    muonYMin = cms.double(-100),
    muonYMax = cms.double(100),
    muonZBin = cms.int32(50),
    muonZMin = cms.double(-500),
    muonZMax = cms.double(500),
    muonEtaBin = cms.int32(50),
    muonEtaMin = cms.double(-3.2),
    muonEtaMax = cms.double(3.2),
    muonPhiBin = cms.int32(50),
    muonPhiMin = cms.double(-3.2),
    # NOTE(review): phi range ends at 0 (upper hemisphere excluded) — matches
    # cosmics arriving from above, but confirm this asymmetric range is intended.
    muonPhiMax = cms.double(0.),
    muonD0Bin = cms.int32(50),
    muonD0Min = cms.double(-100),
    muonD0Max = cms.double(100),
    muonCompatibleLayersBin = cms.int32(10),
    muonCompatibleLayersMin = cms.double(0),
    muonCompatibleLayersMax = cms.double(30),
    # histogram binning for the tracker-track variables (mirrors the muon set)
    trackXBin = cms.int32(50),
    trackXMin = cms.double(-100),
    trackXMax = cms.double(100),
    trackYBin = cms.int32(50),
    trackYMin = cms.double(-100),
    trackYMax = cms.double(100),
    trackZBin = cms.int32(50),
    trackZMin = cms.double(-500),
    trackZMax = cms.double(500),
    trackEtaBin = cms.int32(50),
    trackEtaMin = cms.double(-3.2),
    trackEtaMax = cms.double(3.2),
    trackPhiBin = cms.int32(50),
    trackPhiMin = cms.double(-3.2),
    trackPhiMax = cms.double(0.),
    trackD0Bin = cms.int32(50),
    trackD0Min = cms.double(-100),
    trackD0Max = cms.double(100),
    trackCompatibleLayersBin = cms.int32(10),
    trackCompatibleLayersMin = cms.double(0),
    trackCompatibleLayersMax = cms.double(30),
    # binning for muon-track residuals, raw and pull (sign-normalized)
    deltaXBin = cms.int32(50),
    deltaXMin = cms.double(-100),
    deltaXMax = cms.double(100),
    deltaYBin = cms.int32(50),
    deltaYMin = cms.double(-100),
    deltaYMax = cms.double(100),
    signDeltaXBin = cms.int32(50),
    signDeltaXMin = cms.double(-5),
    signDeltaXMax = cms.double(5),
    signDeltaYBin = cms.int32(50),
    signDeltaYMin = cms.double(-5),
    signDeltaYMax = cms.double(5),
)
| 28.608247
| 76
| 0.641802
|
b149f7ef4b3a745f29a9235debfd572e21452081
| 2,327
|
py
|
Python
|
core/api/users/exceptions.py
|
p-panagiotis/venom
|
8544f44b10e95bd3964ddde997cda4169c6d34f0
|
[
"MIT"
] | null | null | null |
core/api/users/exceptions.py
|
p-panagiotis/venom
|
8544f44b10e95bd3964ddde997cda4169c6d34f0
|
[
"MIT"
] | null | null | null |
core/api/users/exceptions.py
|
p-panagiotis/venom
|
8544f44b10e95bd3964ddde997cda4169c6d34f0
|
[
"MIT"
] | null | null | null |
from core.venom import messages
class UserUsernameAlreadyInUseException(Exception):
    """Raised when the requested username is already taken."""

    def __init__(self, username):
        detail = messages["core.api.users.username_already_in_use"] % username
        self.detail = detail
        super().__init__(detail)
class UserEmailAlreadyInUseException(Exception):
    """Raised when the requested e-mail address is already taken."""

    def __init__(self, email):
        detail = messages["core.api.users.email_already_in_use"] % email
        self.detail = detail
        super().__init__(detail)
class UserNotFoundException(Exception):
    """Raised when no user exists for the given id."""

    def __init__(self, user_id):
        detail = messages["core.api.users.user_not_found"] % user_id
        self.detail = detail
        super().__init__(detail)
class UserOldPasswordCannotBeVerifiedException(Exception):
    """Raised when the supplied current password does not verify."""

    def __init__(self):
        detail = messages["core.api.users.user_old_password_cannot_be_verified"]
        self.detail = detail
        super().__init__(detail)
class UserPasswordsCannotBeConfirmedException(Exception):
    """Raised when the new password and its confirmation do not match."""

    def __init__(self):
        detail = messages["core.api.users.user_passwords_cannot_be_confirmed"]
        self.detail = detail
        super().__init__(detail)
class UserGroupNotFoundException(Exception):
    """Raised when no user group exists for the given id."""

    def __init__(self, user_group_id):
        detail = messages["core.api.users.user_group_not_found"] % user_group_id
        self.detail = detail
        super().__init__(detail)
class UserGroupAlreadyAssignedWithRoleException(Exception):
    """Raised when a role is assigned to a group that already has it."""

    def __init__(self, user_group_name, role_name):
        detail = messages["core.api.users.user_group_already_assigned_with_role"] % (user_group_name, role_name)
        self.detail = detail
        super().__init__(detail)
class UserGroupAlreadyInUseException(Exception):
    """Raised when the requested group name is already taken."""

    def __init__(self, name):
        detail = messages["core.api.users.user_group_already_in_use"] % name
        self.detail = detail
        super().__init__(detail)
class UserAlreadyAssignedWithRoleException(Exception):
    """Raised when a role is assigned to a user who already has it."""

    def __init__(self, username, role_name):
        detail = messages["core.api.users.user_already_assigned_with_role"] % (username, role_name)
        self.detail = detail
        super().__init__(detail)
| 35.8
| 117
| 0.768801
|
8d07b918502535d6126720d9f9bf2e28d8b9f6c5
| 5,247
|
py
|
Python
|
neurom/apps/cli.py
|
musicinmybrain/NeuroM
|
76b8c557b81d4189b6c04598e62af3a1a67bebfd
|
[
"BSD-3-Clause"
] | null | null | null |
neurom/apps/cli.py
|
musicinmybrain/NeuroM
|
76b8c557b81d4189b6c04598e62af3a1a67bebfd
|
[
"BSD-3-Clause"
] | null | null | null |
neurom/apps/cli.py
|
musicinmybrain/NeuroM
|
76b8c557b81d4189b6c04598e62af3a1a67bebfd
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2015, Ecole Polytechnique Federale de Lausanne, Blue Brain Project
# All rights reserved.
#
# This file is part of NeuroM <https://github.com/BlueBrain/NeuroM>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of
# its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""The morph-tool command line launcher."""
import logging
import click
import matplotlib.pyplot as plt
from neurom.apps import morph_stats, morph_check
from neurom import load_neuron
from neurom.viewer import draw as pyplot_draw
@click.group()
@click.option('-v', '--verbose', count=True, default=0,
              help='-v for WARNING, -vv for INFO, -vvv for DEBUG')
def cli(verbose):
    """The CLI entry point: configures logging verbosity for all subcommands."""
    # Map the -v repeat count to a severity, capping at DEBUG for -vvv or more.
    level = (logging.WARNING, logging.INFO, logging.DEBUG)[min(verbose, 2)]
    logging.basicConfig(level=level)
@cli.command()
@click.argument('input_file')
@click.option('--plane', type=click.Choice(['3d', 'xy', 'yx', 'yz', 'zy', 'xz', 'zx']),
              default='3d')
@click.option('--backend', type=click.Choice(['plotly', 'matplotlib']),
              default='matplotlib')
@click.option('-r', '--realistic-diameters/--no-realistic-diameters', default=False,
              help='Scale diameters according to the plot axis\n'
                   'Warning: Only works with the matplotlib backend')
def view(input_file, plane, backend, realistic_diameters):
    """A simple neuron viewer: renders `input_file` with matplotlib or plotly."""
    # pylint: disable=import-outside-toplevel
    if backend == 'matplotlib':
        kwargs = {
            'mode': '3d' if plane == '3d' else '2d',
            'realistic_diameters': realistic_diameters,
        }
        # the matplotlib drawer only accepts `plane` in 2d mode
        if plane != '3d':
            kwargs['plane'] = plane
        pyplot_draw(load_neuron(input_file), **kwargs)
        plt.show()
    else:
        # imported lazily so plotly is only required when actually used
        from neurom.view.plotly import draw as plotly_draw
        plotly_draw(load_neuron(input_file), plane=plane)
@cli.command(short_help='Morphology statistics extractor, more details at'
                        'https://neurom.readthedocs.io/en/latest/morph_stats.html')
@click.argument('datapath', required=False)
@click.option('-C', '--config', type=click.Path(exists=True, dir_okay=False),
              default=morph_stats.EXAMPLE_CONFIG, show_default=True,
              help='Configuration File')
@click.option('-o', '--output', type=click.Path(exists=False, dir_okay=False),
              help='Path to output file, if it ends in .json, a json file is created,'
                   'otherwise a csv file is created')
@click.option('-f', '--full-config', is_flag=True, default=False,
              help='If passed then --config is ignored. Compute statistics for all neurite'
                   'types, all modes and all features')
@click.option('--as-population', is_flag=True, default=False,
              help='If enabled the directory is treated as a population')
@click.option('-I', '--ignored-exceptions', help='Exception to ignore',
              type=click.Choice(morph_stats.IGNORABLE_EXCEPTIONS.keys()))
def stats(datapath, config, output, full_config, as_population, ignored_exceptions):
    """Cli for apps/morph_stats: thin wrapper delegating all work to morph_stats.main."""
    morph_stats.main(datapath, config, output, full_config, as_population, ignored_exceptions)
@cli.command(short_help='Perform checks on morphologies, more details at'
                        'https://neurom.readthedocs.io/en/latest/morph_check.html')
@click.argument('datapath')
@click.option('-C', '--config', type=click.Path(exists=True, dir_okay=False),
              default=morph_check.EXAMPLE_CONFIG, show_default=True,
              help='Configuration File')
@click.option('-o', '--output', type=click.Path(exists=False, dir_okay=False),
              help='Path to output json summary file')
def check(datapath, config, output):
    """Cli for apps/morph_check: thin wrapper delegating all work to morph_check.main."""
    morph_check.main(datapath, config, output)
| 49.037383
| 94
| 0.693539
|
4654c770f2c695e38705046ada12231ddcd3ecce
| 799
|
py
|
Python
|
python/validParentheses.py
|
guozengxin/myleetcode
|
ed7ab4f716845646cf164a08f03ea342f60b14e1
|
[
"MIT"
] | null | null | null |
python/validParentheses.py
|
guozengxin/myleetcode
|
ed7ab4f716845646cf164a08f03ea342f60b14e1
|
[
"MIT"
] | null | null | null |
python/validParentheses.py
|
guozengxin/myleetcode
|
ed7ab4f716845646cf164a08f03ea342f60b14e1
|
[
"MIT"
] | null | null | null |
class Solution(object):
    """LeetCode 20: decide whether a string of brackets is balanced."""

    def isValid(self, s):
        """
        :type s: str
        :rtype: bool
        """
        # An odd-length string can never be fully matched.
        if len(s) % 2 == 1:
            return False
        pairs = {')': '(', ']': '[', '}': '{'}
        pending = []
        for ch in s:
            if ch in '([{':
                pending.append(ch)
            elif not pending or pairs.get(ch) != pending.pop():
                # closer with nothing open, unknown char, or mismatched pair
                return False
        return not pending
# Ad-hoc smoke test. NOTE: Python 2 only — the `print` statement below
# is a syntax error under Python 3.
strs = [
    '()',
    '{()}[]',
    '{(})',
]
solu = Solution()
for s in strs:
    print s, solu.isValid(s)
| 21.594595
| 48
| 0.34418
|
181ead5f6730bd5224515626f5af0e4f6b490413
| 13,057
|
py
|
Python
|
dev_nb/nb_002.py
|
gurvindersingh/fastai_v1
|
18c6170f7fa852f6f24c03badb1bdb03f40c5be9
|
[
"Apache-2.0"
] | null | null | null |
dev_nb/nb_002.py
|
gurvindersingh/fastai_v1
|
18c6170f7fa852f6f24c03badb1bdb03f40c5be9
|
[
"Apache-2.0"
] | null | null | null |
dev_nb/nb_002.py
|
gurvindersingh/fastai_v1
|
18c6170f7fa852f6f24c03badb1bdb03f40c5be9
|
[
"Apache-2.0"
] | null | null | null |
#################################################
### THIS FILE WAS AUTOGENERATED! DO NOT EDIT! ###
#################################################
from nb_001b import *
import sys, PIL, matplotlib.pyplot as plt, itertools, math, random, collections, torch
import scipy.stats, scipy.special
from enum import Enum, IntEnum
from torch import tensor, Tensor, FloatTensor, LongTensor, ByteTensor, DoubleTensor, HalfTensor, ShortTensor
from operator import itemgetter, attrgetter
from numpy import cos, sin, tan, tanh, log, exp
from dataclasses import field
from functools import reduce
from collections import defaultdict, abc, namedtuple, Iterable
def find_classes(folder):
    "Return the class subdirectories of `folder` sorted by name; hidden dirs are skipped."
    subdirs = [entry for entry in folder.iterdir()
               if entry.is_dir() and not entry.name.startswith('.')]
    assert(len(subdirs)>0)
    return sorted(subdirs, key=lambda entry: entry.name)
def get_image_files(c):
    "Non-hidden, non-directory entries of the class directory `c`."
    return [entry for entry in c.iterdir()
            if not entry.name.startswith('.') and not entry.is_dir()]
def pil2tensor(image):
    "Convert a PIL image to a float CHW tensor with values scaled to [0, 1]."
    # Copy raw bytes, reshape to HxWxC, then move channels first.
    arr = torch.ByteTensor(torch.ByteStorage.from_buffer(image.tobytes()))
    arr = arr.view(image.size[1], image.size[0], -1)
    arr = arr.permute(2,0,1)
    return arr.float().div_(255)
def open_image(fn):
    "Load image file `fn` as an RGB float tensor in [0, 1] (CHW)."
    x = PIL.Image.open(fn).convert('RGB')
    return pil2tensor(x)
class FilesDataset(Dataset):
    "Image-classification dataset: one subfolder of `folder` per class."
    def __init__(self, folder, classes=None):
        self.fns, self.y = [], []
        if classes is None: classes = [cls.name for cls in find_classes(folder)]
        self.classes = classes
        for i, cls in enumerate(classes):
            fnames = get_image_files(folder/cls)
            self.fns += fnames
            self.y += [i] * len(fnames)  # label = index of the class folder

    def __len__(self): return len(self.fns)
    def __getitem__(self,i): return open_image(self.fns[i]),self.y[i]
def image2np(image):
    "Convert a CHW tensor image to an HWC numpy array on the CPU."
    cpu_img = image.cpu()
    return cpu_img.permute(1, 2, 0).numpy()
def show_image(img, ax=None, figsize=(3,3), hide_axis=True):
    "Display tensor image `img` on matplotlib axis `ax` (a new figure if None)."
    if ax is None: fig,ax = plt.subplots(figsize=figsize)
    ax.imshow(image2np(img))
    if hide_axis: ax.axis('off')
def show_image_batch(dl, classes, rows=None, figsize=(12,15)):
    "Show the first batch of dataloader `dl` as a rows x rows grid of labelled images."
    x,y = next(iter(dl))
    # default grid: largest square that the batch can fill
    if rows is None: rows = int(math.sqrt(len(x)))
    # Bug fix: `figsize` was accepted but never forwarded to show_images,
    # so the parameter silently had no effect.
    show_images(x[:rows*rows], y[:rows*rows], rows, classes, figsize=figsize)
def show_images(x,y,rows, classes, figsize=(9,9)):
    "Plot the first rows*rows images of `x` in a grid, titled by their class names."
    fig, axs = plt.subplots(rows,rows,figsize=figsize)
    for i, ax in enumerate(axs.flatten()):
        show_image(x[i], ax)
        ax.set_title(classes[y[i]])
    plt.tight_layout()
def logit(x):
    "Inverse sigmoid: log(x / (1 - x)), element-wise on a tensor."
    odds = x / (1 - x)
    return odds.log()
def logit_(x):
    "In-place inverse sigmoid: maps probabilities in `x` to log-odds, mutating `x`."
    x.reciprocal_()
    x.sub_(1)
    x.log_()
    return x.neg_()
def uniform(low, high, size=None):
    "Draw from U(low, high): a python float when `size` is None, else a FloatTensor of that size."
    if size is None:
        return random.uniform(low, high)
    return torch.FloatTensor(size).uniform_(low, high)
def log_uniform(low, high, size=None):
    "Sample log-uniformly between `low` and `high` (uniform in log-space, then exponentiated)."
    draw = uniform(log(low), log(high), size)
    return exp(draw) if size is None else draw.exp_()
def rand_bool(p, size=None):
    "Bernoulli draw(s) with probability `p`: one bool, or a tensor of bools when `size` is given."
    return uniform(0, 1, size) < p
import inspect
from copy import copy,deepcopy
def get_default_args(func):
    "Map each parameter of `func` that has a default to that default value."
    defaults = {}
    for name, param in inspect.signature(func).parameters.items():
        if param.default is not inspect.Parameter.empty:
            defaults[name] = param.default
    return defaults
def listify(p=None, q=None):
    "Coerce `p` to a list, replicating a singleton to match `q` (an int length or a sized collection)."
    if p is None:
        p = []
    elif not isinstance(p, Iterable):
        p = [p]
    if type(q) == int:
        n = q
    elif q is None:
        n = 1
    else:
        n = len(q)
    if len(p) == 1:
        p = p * n
    return p
class Transform():
    """Wrap a transform function with lazy random-parameter resolution.

    `func`'s parameter annotations name the random generators used to resolve
    them (see RandTransform); `_wrap` names the Image method through which the
    function is applied ('lighting', 'pixel', 'coord', 'affine').
    """
    _wrap=None
    order=0  # position in the sorted transform pipeline
    def __init__(self, func, order=None):
        if order is not None: self.order=order
        self.func=func
        self.params = copy(func.__annotations__)  # param name -> random generator
        self.def_args = get_default_args(func)
    def __call__(self, *args, p=1., **kwargs):
        # Called with data: apply immediately. Called with only config:
        # return a RandTransform to be resolved per-sample with probability p.
        if args: return self.calc(*args, **kwargs)
        else: return RandTransform(self, kwargs=kwargs, p=p)
    def calc(self, x, *args, **kwargs):
        # Dispatch through the Image wrapper method when one is declared.
        if self._wrap: return getattr(x, self._wrap)(self.func, *args, **kwargs)
        else: return self.func(x, *args, **kwargs)
    @property
    def name(self): return self.__class__.__name__
    def __repr__(self): return f'{self.name} ({self.func.__name__})'
# Lighting transforms operate in logit space via Image.lighting, at pipeline order 8.
class TfmLighting(Transform): order,_wrap = 8,'lighting'
@dataclass
class RandTransform():
    "A Transform plus stored kwargs, resolved to concrete random values per sample."
    tfm:Transform
    kwargs:dict
    p:int=1.0  # probability that the transform runs at all
    resolved:dict = field(default_factory=dict)
    do_run:bool = True
    def resolve(self):
        "Draw concrete values for every parameter and decide whether to run."
        self.resolved = {}
        # for each param passed to tfm...
        for k,v in self.kwargs.items():
            # ...if it's annotated, call that fn...
            if k in self.tfm.params:
                rand_func = self.tfm.params[k]
                self.resolved[k] = rand_func(*listify(v))
            # ...otherwise use the value directly
            else: self.resolved[k] = v
        # use defaults for any args not filled in yet
        for k,v in self.tfm.def_args.items():
            if k not in self.resolved: self.resolved[k]=v
        # anything left over must be callable without params
        for k,v in self.tfm.params.items():
            if k not in self.resolved: self.resolved[k]=v()
        self.do_run = rand_bool(self.p)
    @property
    def order(self): return self.tfm.order
    def __call__(self, x, *args, **kwargs):
        # Identity when the p-coin decided against running.
        return self.tfm(x, *args, **{**self.resolved, **kwargs}) if self.do_run else x
# Brightness: additive shift in logit space; `change` in (0,1), 0.5 = no change.
@TfmLighting
def brightness(x, change:uniform): return x.add_(scipy.special.logit(change))
# Contrast: multiplicative scale in logit space; 1.0 = no change.
@TfmLighting
def contrast(x, scale:log_uniform): return x.mul_(scale)
def resolve_tfms(tfms):
    "Draw fresh random parameter values for every RandTransform in `tfms`."
    for f in listify(tfms): f.resolve()
def apply_tfms(tfms, x, do_resolve=True):
    "Apply `tfms` to tensor `x` and return the transformed pixel tensor."
    # NOTE(review): this definition is shadowed by the extended apply_tfms
    # defined later in this file — as written, this version is dead code.
    if not tfms: return x
    tfms = listify(tfms)
    if do_resolve: resolve_tfms(tfms)
    x = Image(x.clone())
    for tfm in tfms: x = tfm(x)
    return x.px
def grid_sample_nearest(input, coords, padding_mode='zeros'):
    """Nearest-neighbor sampling of `input` (bs,ch,h,w) at normalized `coords` (1,H,W,2).

    coords are in [-1,1]; 'zeros' fills out-of-range samples with 0, any other
    mode clamps them to the border. NOTE: `coords` is modified in place.
    """
    # Bug fix: the original called coords.clamp(-1,1) and discarded the result
    # (clamp is out-of-place); the clamp must be in-place to take effect.
    if padding_mode=='border': coords.clamp_(-1,1)
    bs,ch,h,w = input.size()
    sz = torch.tensor([w,h]).float()[None,None]
    coords.add_(1).mul_(sz/2)      # map [-1,1] -> pixel coordinates
    coords = coords[0].round_().long()
    if padding_mode=='zeros':
        # Fix: combine with boolean `|` instead of the original uint8 `+` and
        # mask.clamp_(0,1), which fails on modern PyTorch bool tensors.
        mask = ((coords[...,0] < 0) | (coords[...,1] < 0) |
                (coords[...,0] >= w) | (coords[...,1] >= h))
    # Clamp indices into range for all padding modes (also protects 'border').
    coords[...,0].clamp_(0,w-1)
    coords[...,1].clamp_(0,h-1)
    result = input[...,coords[...,1],coords[...,0]]
    if padding_mode=='zeros': result[...,mask] = 0.
    return result
def grid_sample(x, coords, mode='bilinear', padding_mode='reflect'):
    "Sample CHW tensor `x` at `coords`, delegating to the custom nearest sampler or F.grid_sample."
    # translate this codebase's 'reflect' to torch's 'reflection'
    if padding_mode=='reflect': padding_mode='reflection'
    if mode=='nearest': return grid_sample_nearest(x[None], coords, padding_mode)[0]
    return F.grid_sample(x[None], coords, mode=mode, padding_mode=padding_mode)[0]
def affine_grid(size):
    "Identity sampling grid of shape (1, H, W, 2) with coordinates in [-1, 1]; `size` is (C, H, W)."
    N, C, H, W = (1,) + size
    # Degenerate 1-pixel axes get the single coordinate -1 (linspace would give nan-free [-1,1] pair otherwise).
    xs = torch.linspace(-1, 1, W) if W > 1 else torch.Tensor([-1])
    ys = torch.linspace(-1, 1, H) if H > 1 else torch.Tensor([-1])
    grid = FloatTensor(N, H, W, 2)
    grid[:, :, :, 0] = xs            # broadcast x coords across rows
    grid[:, :, :, 1] = ys[:, None]   # broadcast y coords across columns
    return grid
def affine_mult(c, m):
    "Apply 3x3 affine matrix `m` to a coordinate grid `c` of shape (..., 2); m=None is the identity."
    if m is None:
        return c
    orig_shape = c.size()
    flat = c.view(-1, 2)
    # rotation/scale via the top-left 2x2 block, translation via the last column
    flat = torch.addmm(m[:2, 2], flat, m[:2, :2].t())
    return flat.view(orig_shape)
class Image():
    """Tensor image with lazily-applied transforms.

    Lighting ops accumulate in `_logit_px`, affine ops compose into
    `_affine_mat`, coord ops edit `_flow`; everything is materialized into
    `_px` only when `px` is read (see refresh).
    """
    def __init__(self, px):
        self._px = px                # CHW pixel tensor
        self._logit_px=None          # pending lighting edits, in logit space
        self._flow=None              # pending sampling grid
        self._affine_mat=None        # pending composed 3x3 affine matrix
        self.sample_kwargs = {}      # extra kwargs for grid_sample
    @property
    def shape(self): return self._px.shape
    def __repr__(self): return f'{self.__class__.__name__} ({self.px.shape})'
    def refresh(self):
        "Materialize pending lighting/affine/coord edits into `_px`."
        if self._logit_px is not None:
            self._px = self._logit_px.sigmoid_()
            self._logit_px = None
        if self._affine_mat is not None or self._flow is not None:
            self._px = grid_sample(self._px, self.flow, **self.sample_kwargs)
            self.sample_kwargs = {}
            self._flow = None
        return self
    @property
    def px(self):
        # Reading pixels forces all pending work to be applied first.
        self.refresh()
        return self._px
    @px.setter
    def px(self,v): self._px=v
    @property
    def flow(self):
        # Lazily create the identity grid, then fold any pending affine into it.
        if self._flow is None:
            self._flow = affine_grid(self.shape)
        if self._affine_mat is not None:
            self._flow = affine_mult(self._flow,self._affine_mat)
            self._affine_mat = None
        return self._flow
    @flow.setter
    def flow(self,v): self._flow=v
    def lighting(self, func, *args, **kwargs):
        "Apply `func` in logit space (deferred until px is read)."
        self.logit_px = func(self.logit_px, *args, **kwargs)
        return self
    def pixel(self, func, *args, **kwargs):
        "Apply `func` directly to the pixel tensor."
        self.px = func(self.px, *args, **kwargs)
        return self
    def coord(self, func, *args, **kwargs):
        "Apply `func` to the sampling grid."
        self.flow = func(self.flow, self.shape, *args, **kwargs)
        return self
    def affine(self, func, *args, **kwargs):
        "Compose the 3x3 matrix returned by `func` into the pending affine."
        m = func(*args, **kwargs)
        self.affine_mat = self.affine_mat @ self._px.new(m)
        return self
    def set_sample(self, **kwargs):
        "Store kwargs forwarded to grid_sample at refresh time."
        self.sample_kwargs = kwargs
        return self
    def resize(self, size):
        "Target a new output size by replacing the sampling grid."
        assert self._flow is None
        if isinstance(size, int): size=(self.shape[0], size, size)
        self.flow = affine_grid(size)
        return self
    @property
    def affine_mat(self):
        if self._affine_mat is None: self._affine_mat = self._px.new(torch.eye(3))
        return self._affine_mat
    @affine_mat.setter
    def affine_mat(self,v): self._affine_mat=v
    @property
    def logit_px(self):
        # Lazily convert pixels to logit space the first time lighting is used.
        if self._logit_px is None: self._logit_px = logit_(self.px)
        return self._logit_px
    @logit_px.setter
    def logit_px(self,v): self._logit_px=v
    def show(self, ax=None, **kwargs): show_image(self.px, ax=ax, **kwargs)
    def clone(self): return self.__class__(self.px.clone())
# Affine transforms return a 3x3 matrix, applied via Image.affine at order 5.
class TfmAffine(Transform): order,_wrap = 5,'affine'
# Pixel transforms edit the pixel tensor directly via Image.pixel at order 10.
class TfmPixel(Transform): order,_wrap = 10,'pixel'
@TfmAffine
def rotate(degrees:uniform):
    "3x3 affine matrix rotating by `degrees` (counter-clockwise in grid space)."
    angle = degrees * math.pi / 180
    return [[cos(angle), -sin(angle), 0.],
            [sin(angle),  cos(angle), 0.],
            [0.        ,  0.        , 1.]]
def get_zoom_mat(sw, sh, c, r):
    "3x3 affine matrix that scales by (sw, sh) and translates by (c, r)."
    scale_x_row = [sw, 0, c]
    scale_y_row = [0, sh, r]
    homogeneous_row = [0, 0, 1.]
    return [scale_x_row, scale_y_row, homogeneous_row]
@TfmAffine
def zoom(scale:uniform=1.0, row_pct:uniform=0.5, col_pct:uniform=0.5):
    "Zoom by `scale`, centered at the (row_pct, col_pct) fraction of the image."
    s = 1-1/scale
    # translation keeps the chosen focal point fixed while zooming
    col_c = s * (2*col_pct - 1)
    row_c = s * (2*row_pct - 1)
    return get_zoom_mat(1/scale, 1/scale, col_c, row_c)
@TfmAffine
def squish(scale:uniform=1.0, row_pct:uniform=0.5, col_pct:uniform=0.5):
    "Squish one axis by `scale` (<1 squashes horizontally, >1 vertically), anchored at (row_pct, col_pct)."
    if scale <= 1:
        col_c = (1-scale) * (2*col_pct - 1)
        return get_zoom_mat(scale, 1, col_c, 0.)
    else:
        row_c = (1-1/scale) * (2*row_pct - 1)
        return get_zoom_mat(1, 1/scale, 0., row_c)
@partial(Transform, order=TfmAffine.order-2)
def resize_image(x, size):
    "Resize `x` (an `Image`) to `size`; ordered ahead of affine transforms."
    return x.resize(size)
def apply_tfms(tfms, x, do_resolve=True, xtra=None, size=None, **kwargs):
    "Apply `tfms` (in `order`) to tensor `x` and return the resulting pixels."
    if not tfms:
        return x
    if not xtra:
        xtra = {}
    ordered = sorted(listify(tfms), key=lambda t: t.tfm.order)
    if do_resolve:
        resolve_tfms(ordered)
    img = Image(x.clone())  # never mutate the caller's tensor
    if kwargs:
        img.set_sample(**kwargs)
    if size:
        img.resize(size)
    for t in ordered:
        # per-transform extra arguments may be supplied keyed by the raw tfm
        if t.tfm in xtra:
            img = t(img, **xtra[t.tfm])
        else:
            img = t(img)
    return img.px
class TfmCoord(Transform):
    "Transform dispatched through `Image.coord` (its function warps the flow-field)."
    order = 4
    _wrap = 'coord'
@TfmCoord
def jitter(c, size, magnitude:uniform):
    "Perturb each flow coordinate in-place by uniform noise in [-magnitude, magnitude]."
    noise = (torch.rand_like(c) - 0.5) * magnitude * 2
    return c.add_(noise)
@TfmPixel
def flip_lr(x):
    "Mirror the image horizontally (flip along the column dimension)."
    return x.flip(2)
@partial(TfmPixel, order=-10)
def pad(x, padding, mode='reflect'):
    "Pad all four sides of `x` by `padding` pixels using padding `mode`."
    four_sides = (padding,) * 4
    # F.pad wants a batch dimension; add it, pad, then strip it again
    return F.pad(x[None], four_sides, mode=mode)[0]
@TfmPixel
def crop(x, size, row_pct:uniform=0.5, col_pct:uniform=0.5):
    "Crop `x` to `size`, positioning the window by (`row_pct`,`col_pct`)."
    rows, cols = listify(size, 2)
    top  = int((x.size(1) - rows + 1) * row_pct)
    left = int((x.size(2) - cols + 1) * col_pct)
    return x[:, top:top+rows, left:left+cols].contiguous()
def compute_zs_mat(sz, scale, squish, invert, row_pct, col_pct):
    "Return the first zoom/squish matrix that stays inside the picture, else a centre-crop-like matrix."
    orig_ratio = math.sqrt(sz[2]/sz[1])
    for sc, sq, inv in zip(scale, squish, invert):
        sc, sq = math.sqrt(sc), math.sqrt(sq)
        if sc*sq > 1 or sc/sq > 1:
            continue  # this candidate would sample outside the picture
        w, h = (sc/sq, sc*sq) if inv else (sc*sq, sc/sq)
        w, h = w/orig_ratio, h*orig_ratio
        col_c = (1-w) * (2*col_pct - 1)
        row_c = (1-h) * (2*row_pct - 1)
        return get_zoom_mat(w, h, col_c, row_c)
    # Fallback: hack to emulate a centre crop without cropping anything yet.
    if orig_ratio > 1:
        return get_zoom_mat(1/orig_ratio**2, 1, 0, 0.)
    return get_zoom_mat(1, orig_ratio**2, 0, 0.)
@TfmCoord
def zoom_squish(c, size, scale:uniform=1.0, squish:uniform=1.0, invert:rand_bool=False,
                row_pct:uniform=0.5, col_pct:uniform=0.5):
    "Warp the flow-field with a zoom/squish matrix (torchvision RandomResizedCrop-style)."
    # `scale`/`squish`/`invert` are expected to hold several candidates (e.g. 10)
    # so a valid zoom/squish can be found before falling back to a centre crop.
    mat = compute_zs_mat(size, scale, squish, invert, row_pct, col_pct)
    return affine_mult(c, FloatTensor(mat))
| 34.002604
| 108
| 0.618059
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.